Example #1
        /// <summary>
        /// Handles deleting existing data files. Requires authentication and a special privilege.
        /// </summary>
        /// <param name="shandler">Server state, including the item map and the journal.</param>
        /// <param name="request">The incoming HTTP request.</param>
        /// <param name="body">Stream carrying the authenticated JSON request message.</param>
        /// <param name="encoder">Encoder used to write the HTTP response.</param>
        /// <returns></returns>
        public static async Task <Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
        {
            var auth_resp = await Helpers.ReadMessageFromStreamAndAuthenticate(shandler, 1024 * 16, body);

            if (!auth_resp.success)
            {
                return(encoder.Response(403, "Denied").SendNothing());
            }

            if (!auth_resp.user.can_delete)
            {
                return(encoder.Response(403, "Denied").SendNothing());
            }

            var sreq = JsonConvert.DeserializeObject <DeleteRequest>(auth_resp.payload);

            var sid = sreq.sid;

            if (shandler.items.ContainsKey(sid))
            {
                var item = shandler.items[sid];

                try
                {
                    File.Delete(item.fqpath);
                }
                catch (Exception)
                {
                    await encoder.WriteQuickHeaderAndStringBody(
                        500, "Error", JsonConvert.SerializeObject(new DeleteResponse()
                        {
                            success = false,
                        })
                    );

                    return(Task.CompletedTask);
                }

                if (item.fqpath != null && item.fqpath.Length > 0)
                {
                    shandler.UsedSpaceSubtract((long)item.datasize);
                }

                item.fqpath = null;

                await shandler.WriteItemToJournal(item);
            }

            await encoder.WriteQuickHeaderAndStringBody(
                200, "Deleted", JsonConvert.SerializeObject(new DeleteResponse()
                {
                    success = true,
                })
            );

            return(Task.CompletedTask);
        }
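For context, the two JSON shapes this handler round-trips can be sketched from the fields it actually touches (`sreq.sid` and `success`). This is an inferred minimal sketch, not the authoritative MDACS definition, which may declare more members:

        // Inferred minimal sketch of the DTOs used above; the real MDACS types
        // may carry additional fields.
        public class DeleteRequest
        {
            public string sid;       // security_id of the item whose data file is deleted
        }

        public class DeleteResponse
        {
            public bool success;     // false when the server-side File.Delete failed
        }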
Example #2
        /// <summary>
        /// Handles reading the header and data for a data upload. A critical routine that employs
        /// as many checks as needed to verify that the written data is complete and correct.
        /// </summary>
        /// <param name="shandler">Server state, including the data path and the journal.</param>
        /// <param name="request">The incoming HTTP request.</param>
        /// <param name="body">Stream carrying the upload header line followed by the raw data.</param>
        /// <param name="encoder">Encoder used to write the HTTP response.</param>
        /// <returns></returns>
        public static async Task <Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
        {
            var buf    = new byte[1024 * 32];
            int bufndx = 0;
            int cnt;
            int tndx;

            do
            {
                cnt = await body.ReadAsync(buf, bufndx, buf.Length - bufndx);

                if (cnt > 0)
                {
                    bufndx += cnt;
                }

                // Only search the bytes actually received for the header terminator.
                tndx = Array.IndexOf(buf, (byte)'\n', 0, bufndx);

                if (bufndx >= buf.Length && tndx < 0)
                {
                    throw new ProgramException("While receiving the upload header: the header size exceeded the 32 KiB buffer.");
                }
            } while (cnt > 0 && tndx < 0);

            var hdrstr = Encoding.UTF8.GetString(buf, 0, tndx).Trim();

            Debug.WriteLine(hdrstr);

            var auth_package = JsonConvert.DeserializeObject <MDACS.API.Auth.Msg>(hdrstr);

            var payload = auth_package.payload;


            /*
             * var info = await MDACS.API.Auth.AuthenticateMessageAsync(shandler.auth_url, auth_package);
             *
             * if (!info.success)
             * {
             *     throw new UnauthorizedException();
             * }
             */

            var hdr = JsonConvert.DeserializeObject <MDACS.API.Requests.UploadHeader>(payload);

            Array.Copy(buf, tndx + 1, buf, 0, bufndx - (tndx + 1));
            // Quasi-repurpose the variable `bufndx` to mark end of the slack data.
            bufndx = bufndx - (tndx + 1);

            var data_node = String.Format("{0}_{1}_{2}_{3}.{4}",
                                          hdr.datestr,
                                          hdr.userstr,
                                          hdr.devicestr,
                                          hdr.timestr,
                                          hdr.datatype
                                          );

            var data_node_path = Path.Combine(shandler.data_path, data_node);

            // Make the name unique and keep pertinent information in the event something fails.
            var temp_data_node_path = Path.Combine(
                shandler.data_path,
                $"temp_{DateTime.Now.ToFileTime()}_{data_node}"
                );

            // TODO: hash data then rehash data after writing to storage, maybe?

            SHA512 hasher;

            var fhash_sha512 = new byte[512 / 8];

            FileStream fp = null;

            try
            {
#if DEBUG
                //Logger.WriteDebugString($"Opening {temp_data_node_path} as temporary output for upload.");
#endif
                fp = File.Open(temp_data_node_path, FileMode.Create);
                await fp.WriteAsync(buf, 0, bufndx);

                long total = 0;

                // SHA512.Create() returns a ready-to-use incremental hasher.
                hasher = SHA512.Create();

                while (true)
                {
                    var _cnt = await body.ReadAsync(buf, 0, buf.Length);

                    if (_cnt < 1)
                    {
                        break;
                    }

                    Debug.WriteLine($"buf={buf} _cnt={_cnt}");

                    // https://stackoverflow.com/questions/20634827/how-to-compute-hash-of-a-large-file-chunk
                    hasher.TransformBlock(buf, 0, _cnt, null, 0);

                    total += _cnt;
                    await fp.WriteAsync(buf, 0, _cnt);
                }

                hasher.TransformFinalBlock(buf, 0, 0);

                fhash_sha512 = hasher.Hash;

#if DEBUG
                //Logger.WriteDebugString($"Wrote {total} bytes to {temp_data_node_path} as temporary output for upload.");
#endif

                // The loop above has already drained the body, so only a flush remains;
                // any bytes copied here without hashing would corrupt the digest.
                await fp.FlushAsync();

                fp.Dispose();
            }
            catch (Exception ex)
            {
                if (fp != null)
                {
                    fp.Dispose();
                }

#if DEBUG
                //Logger.WriteDebugString($"Exception on {temp_data_node_path} with:\n{ex}");
#endif

                File.Delete(temp_data_node_path);

                await encoder.WriteQuickHeader(500, "Problem");

                await encoder.BodyWriteSingleChunk("Problem during write to file from body stream.");

                return(Task.CompletedTask);
            }

#if DEBUG
            //Logger.WriteDebugString($"Upload for {temp_data_node_path} is done.");
#endif

            if (!await WaitForFileSizeMatch(temp_data_node_path, (long)hdr.datasize, 3))
            {
                File.Delete(temp_data_node_path);

                await encoder.WriteQuickHeader(504, "Timeout");

                await encoder.BodyWriteSingleChunk("The upload byte length of the destination never reached the intended stream size.");

                return(Task.CompletedTask);
            }

            // Compute the backup name once so a failed move can restore the exact file;
            // regenerating the timestamp inside the catch block would name a file that
            // does not exist.
            var moved_data_node_path = $"{data_node_path}.moved.{DateTime.Now.ToFileTime()}";
            var original_was_moved = false;

            try
            {
                if (File.Exists(data_node_path))
                {
                    await CheckedFileMoveAsync(data_node_path, moved_data_node_path);
                    original_was_moved = true;
                }

                await CheckedFileMoveAsync(temp_data_node_path, data_node_path);
            }
            catch (Exception)
            {
                // Delete the temporary since the original, if any, was preserved above.
                File.Delete(temp_data_node_path);

                if (original_was_moved)
                {
                    // Move the original back to its original filename.
                    await CheckedFileMoveAsync(moved_data_node_path, data_node_path);
                }

                await encoder.WriteQuickHeader(500, "Problem");

                await encoder.BodyWriteSingleChunk("Unable to do a CheckedFileMoveAsync.");

                return(Task.CompletedTask);
            }

            if (!await WaitForFileSizeMatch(data_node_path, (long)hdr.datasize, 3))
            {
                await encoder.WriteQuickHeader(504, "Timeout");

                await encoder.BodyWriteSingleChunk("Timeout waiting for size change.");

                return(Task.CompletedTask);
            }

            Item item = new Item();

            hasher = SHA512.Create();

            var security_id_bytes = hasher.ComputeHash(Encoding.UTF8.GetBytes(data_node));

            item.datasize         = hdr.datasize;
            item.datatype         = hdr.datatype;
            item.datestr          = hdr.datestr;
            item.devicestr        = hdr.devicestr;
            item.duration         = -1.0;
            item.fqpath           = data_node_path;
            item.metatime         = DateTime.Now.ToFileTimeUtc();
            item.node             = data_node;
            item.note             = "";
            item.security_id      = BitConverter.ToString(security_id_bytes).Replace("-", "").ToLower();
            item.timestr          = hdr.timestr;
            item.userstr          = hdr.userstr;
            item.state            = "";
            item.manager_uuid     = shandler.manager_uuid;
            item.data_hash_sha512 = Convert.ToBase64String(fhash_sha512);

            item.duration = MDACS.Database.MediaTools.MP4Info.GetDuration(item.fqpath);

            await shandler.WriteItemToJournal(item);

            var uresponse = new MDACS.API.Responses.UploadResponse();

            uresponse.success     = true;
            uresponse.fqpath      = item.fqpath;
            uresponse.security_id = item.security_id;

            shandler.UsedSpaceAdd((long)hdr.datasize);

            await encoder.WriteQuickHeader(200, "OK");

            await encoder.BodyWriteSingleChunk(JsonConvert.SerializeObject(uresponse));

            // Allow current execution to continue while spinning off
            // a handler for this successful upload.
            shandler.HouseworkAfterUploadSuccess(item);

            return(Task.CompletedTask);
        }
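On the wire, the handler above expects one newline-terminated JSON authentication envelope, whose payload holds the serialized `UploadHeader`, followed immediately by the raw file bytes. A hypothetical client-side sketch under those assumptions (default Newtonsoft serialization emits no raw newlines, so the single `'\n'` terminator is unambiguous):

        // Hypothetical sketch: compose the upload stream that the handler parses.
        // The envelope's payload is assumed to be the serialized UploadHeader.
        public static async Task WriteUploadStreamAsync(Stream wire, MDACS.API.Auth.Msg envelope, byte[] data)
        {
            // One '\n'-terminated JSON header line.
            var header = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(envelope) + "\n");
            await wire.WriteAsync(header, 0, header.Length);

            // The raw file bytes follow immediately; the server hashes and counts them.
            await wire.WriteAsync(data, 0, data.Length);
        }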
Example #3
        /// <summary>
        /// Handles a commit-set request: updates metadata fields on an existing item after
        /// checking per-field authorization for the user.
        /// </summary>
        /// <param name="shandler">Server state, including the item map and the journal.</param>
        /// <param name="request">The incoming HTTP request.</param>
        /// <param name="body">Stream carrying the authenticated JSON request message.</param>
        /// <param name="encoder">Encoder used to write the HTTP response.</param>
        /// <exception cref="UnauthorizedException">User has insufficient privileges to modify the item.</exception>
        /// <exception cref="AuthenticationException">User is not valid for access of any type.</exception>
        /// <exception cref="InvalidArgumentException">One of the arguments was not correct or the reason for failure.</exception>
        /// <exception cref="ProgramException">Anything properly handled but needing acknowledgement.</exception>
        /// <exception cref="Exception">Anything else could result in instability.</exception>
        public static async Task <Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
        {
            var auth_resp = await Helpers.ReadMessageFromStreamAndAuthenticate(shandler, 1024 * 16, body);

            if (!auth_resp.success)
            {
                throw new UnauthorizedException();
            }

            var sreq = JsonConvert.DeserializeObject <API.Requests.CommitSetRequest>(auth_resp.payload);

            Monitor.Enter(shandler.items);

            Item item;

#if DEBUG
            //Logger.WriteDebugString($"sreq.security_id={sreq.security_id}");
#endif

            try
            {
                if (!shandler.items.ContainsKey(sreq.security_id))
                {
                    await encoder.WriteQuickHeader(404, "Not Found");

                    await encoder.BodyWriteSingleChunk("");

                    return(Task.CompletedTask);
                }

                item = shandler.items[sreq.security_id];
            }
            catch (Exception ex)
            {
#if DEBUG
                //Logger.WriteDebugString($"Exception on getting item was:\n{ex}");
#endif
                await encoder.WriteQuickHeader(500, "Error");

                await encoder.BodyWriteSingleChunk("");

                return(Task.CompletedTask);
            }
            finally
            {
                Monitor.Exit(shandler.items);
            }

            if (!Helpers.CanUserModifyItem(auth_resp.user, item))
            {
#if DEBUG
                //Logger.WriteDebugString($"User was not authorized to write to item.");
#endif
                await encoder.WriteQuickHeader(403, "Not Authorized");

                await encoder.BodyWriteSingleChunk("");

                return(Task.CompletedTask);
            }

            // Check fields to see if the user is authorized to modify them.
            foreach (var pair in sreq.meta)
            {
                if (!await shandler.FieldModificationValidForUser(auth_resp.user, pair.Key))
                {
                    await encoder.WriteQuickHeader(403, "Not Authorized");

                    await encoder.BodyWriteSingleChunk("");

                    return(Task.CompletedTask);
                }
            }

            try
            {
                foreach (var pair in sreq.meta)
                {
                    // Reflection simplifies the code at the expense of performance.
                    var field = item.GetType().GetField(pair.Key);
                    field.SetValue(item, pair.Value.ToObject(field.FieldType));

#if DEBUG
                    //Logger.WriteDebugString($"Set field {field} of {sreq.meta} to {pair.Value.ToString()}.");
#endif
                }

                shandler.items[sreq.security_id] = item;

                if (!await shandler.WriteItemToJournal(item))
                {
#if DEBUG
                    //Logger.WriteDebugString($"Error happened when writing to the journal for a commit set operation.");
#endif
                    await encoder.WriteQuickHeader(500, "Error");

                    await encoder.BodyWriteSingleChunk("");

                    return(Task.CompletedTask);
                }
            }
            catch (Exception)
            {
#if DEBUG
                //Logger.WriteDebugString($"Error happened when writing to journal or setting item fields during commit set operation.");
#endif
                await encoder.WriteQuickHeader(500, "Error");

                await encoder.BodyWriteSingleChunk("");

                return(Task.CompletedTask);
            }

            var resp = new MDACS.API.Responses.CommitSetResponse();

            resp.success = true;

            await encoder.WriteQuickHeader(200, "OK");

            await encoder.BodyWriteSingleChunk(JsonConvert.SerializeObject(resp));

            return(Task.CompletedTask);
        }
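From `pair.Value.ToObject(field.FieldType)` above, `sreq.meta` plausibly maps `Item` field names to Newtonsoft `JToken` values. A hypothetical sketch of how a caller might build a single-field update; the dictionary type and the helper name are assumptions, not the confirmed MDACS API:

        // Hypothetical sketch: build a CommitSetRequest that updates one field.
        // Assumes meta is a Dictionary<string, JToken> keyed by Item field name.
        public static API.Requests.CommitSetRequest MakeSingleFieldUpdate(string security_id, string field_name, object value)
        {
            return new API.Requests.CommitSetRequest
            {
                security_id = security_id,
                meta = new Dictionary<string, JToken>
                {
                    [field_name] = JToken.FromObject(value),
                },
            };
        }

Example #4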
        public static async Task <Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
        {
            var auth = await Helpers.ReadMessageFromStreamAndAuthenticate(shandler, 1024 * 16, body);

            if (!auth.success)
            {
                return(encoder.Response(403, "Denied").SendNothing());
            }

            var req = JsonConvert.DeserializeObject <HandleBatchSingleOpsRequest>(auth.payload);

            var failed = new List <BatchSingleOp>();
            var tasks  = new List <Task <bool> >();

            foreach (var op in req.ops)
            {
                var sid        = op.sid;
                var field_name = op.field_name;

                if (!await shandler.FieldModificationValidForUser(auth.user, field_name))
                {
                    return(encoder.Response(403, $"Denied Change On Field {field_name}").SendNothing());
                }
            }

            foreach (var op in req.ops)
            {
                var sid        = op.sid;
                var field_name = op.field_name;
                var value      = op.value;

                lock (shandler.items)
                {
                    if (shandler.items.ContainsKey(sid))
                    {
                        try
                        {
                            var tmp   = shandler.items[sid];
                            var field = tmp.GetType().GetField(field_name);
                            field.SetValue(tmp, value.ToObject(field.FieldType));
                            tasks.Add(shandler.WriteItemToJournal(tmp));
                        }
                        catch (Exception ex)
                        {
                            //Logger.WriteDebugString(
                            //    $"Failed during batch single operation. The SID was {sid}. The field name was {field_name}. The value was {value}. The error was:\n{ex}"
                            //);
                            failed.Add(new BatchSingleOp()
                            {
                                field_name = field_name,
                                sid        = sid,
                                value      = value,
                            });
                        }
                    }
                    else
                    {
                        failed.Add(new BatchSingleOp()
                        {
                            field_name = field_name,
                            sid        = sid,
                            value      = value,
                        });
                    }
                }
            }

            // Await the journal writes rather than blocking the thread with Task.WaitAll.
            await Task.WhenAll(tasks);

            var resp = new HandleBatchSingleOpsResponse();

            resp.success = true;
            resp.failed  = failed.ToArray();

            await encoder.Response(200, "OK")
            .CacheControlDoNotCache()
            .SendJsonFromObject(resp);

            return(Task.CompletedTask);
        }
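For reference, the batch request/response shapes can be inferred from the loops above: each op names a target item (`sid`), a field on `Item`, and a value that supports `ToObject(Type)`. An inferred sketch; the real declarations may differ:

        // Inferred sketch of the batch DTOs; value is assumed to be a Newtonsoft
        // JToken since the handler calls value.ToObject(field.FieldType).
        public class BatchSingleOp
        {
            public string sid;
            public string field_name;
            public JToken value;
        }

        public class HandleBatchSingleOpsRequest
        {
            public BatchSingleOp[] ops;
        }

        public class HandleBatchSingleOpsResponse
        {
            public bool success;
            public BatchSingleOp[] failed;   // ops that could not be applied
        }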