/// <summary>
/// Helper method used to send data stored as a resource under the webres folder (namespace).
/// When compiled with USE_SOURCE_DIRECTORY_WEBRES the resource is read from a hard-coded
/// developer source directory instead of the embedded manifest resources.
/// </summary>
/// <param name="target">The name of the resource in the webres folder/namespace.</param>
/// <param name="state">Pass-through parameter.</param>
/// <param name="request">Pass-through parameter.</param>
/// <param name="body">Pass-through parameter; fully drained before responding.</param>
/// <param name="encoder">Pass-through parameter.</param>
/// <returns>A task object which may or may not be completed already. This also may need to be returned as a dependency of the handler completion.</returns>
private static async Task<Task> StaticRoute(
    string target,
    ServerHandler state,
    HTTPRequest request,
    Stream body,
    IProxyHTTPEncoder encoder)
{
    // Consume any request body; static resources take no input.
    await Util.ReadStreamUntilEndAndDiscardDataAsync(body);
#if USE_SOURCE_DIRECTORY_WEBRES
    // Developer convenience path: serve straight from the checked-out source tree.
    var strm = File.OpenRead(
        Path.Combine(
            @"/home/kmcguire/extra/old/source/repos/MDACSDatabase/MDACSDatabase/webres",
            target
        )
    );
#else
    // Embedded resource names follow the "<default namespace>.webres.<file>" convention.
    var strm = Assembly.GetExecutingAssembly().GetManifestResourceStream($"MDACSDatabase.webres.{target}");

    if (strm == null)
    {
        // Unknown resource: 404 and forbid caching so a later deploy is picked up.
        return (await encoder.Response(404, "Not Found")
            .CacheControl("no-cache, no-store, must-revalidate")
            .SendNothing());
    }
#endif
    // max-age=0 forces revalidation on every request while still allowing shared caches.
    // NOTE(review): stream disposal appears delegated to SendStream — confirm the encoder owns it.
    return (await encoder.Response(200, "OK")
        .ContentType_GuessFromFileName(target)
        .CacheControl("public, max-age=0")
        .SendStream(strm));
}
/// <summary>
/// Handles the version route: reports the build version embedded as the
/// MDACSDatabase.buildinfo.json manifest resource.
/// </summary>
/// <param name="shandler">Server state (unused by this route).</param>
/// <param name="request">The HTTP request (unused by this route).</param>
/// <param name="body">Request body; drained and discarded since this route takes no input.</param>
/// <param name="encoder">Encoder used to emit the JSON response.</param>
/// <returns>A completed task once the response has been written.</returns>
public static async Task<Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
{
    // Drain and discard the request body; this endpoint takes no input.
    var buf = new byte[512];
    int cnt;
    do
    {
        cnt = await body.ReadAsync(buf, 0, buf.Length);
    } while (cnt > 0);

    InternalVersonInfo ver_info;
    using (var strm = Assembly.GetExecutingAssembly().GetManifestResourceStream("MDACSDatabase.buildinfo.json"))
    {
        // BUG FIX: GetManifestResourceStream returns null when the resource is
        // absent; previously this crashed with a NullReferenceException inside
        // StreamReader. Report a clean 500 instead.
        if (strm == null)
        {
            await encoder.WriteQuickHeader(500, "Error");
            await encoder.BodyWriteSingleChunk("");
            return Task.CompletedTask;
        }

        var json_data = await new StreamReader(strm).ReadToEndAsync();
        ver_info = JsonConvert.DeserializeObject<InternalVersonInfo>(json_data);
    }

    var resp = new VersionResponse()
    {
        version = ver_info.version,
    };

    await encoder.WriteQuickHeader(200, "OK");
    await encoder.BodyWriteSingleChunk(JsonConvert.SerializeObject(resp));
    return Task.CompletedTask;
}
/// <summary>
/// Handles the data route: returns every item in the database that the
/// authenticated user is permitted to see.
/// </summary>
/// <param name="shandler">Server state holding the item dictionary.</param>
/// <param name="request">The HTTP request.</param>
/// <param name="body">Request body containing the authentication message.</param>
/// <param name="encoder">Encoder used to emit the JSON response.</param>
/// <returns>The encoder's send task.</returns>
public static async Task<Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
{
    var auth_resp = await Helpers.ReadMessageFromStreamAndAuthenticate(shandler, 1024 * 16, body);

    if (!auth_resp.success)
    {
        return encoder.Response(403, "Denied").SendNothing();
    }

    var reply = new HandleDataReply();
    var visible = new List<Item>();

    lock (shandler.items)
    {
        foreach (var pair in shandler.items)
        {
            if (Helpers.CanUserSeeItem(auth_resp.user, pair.Value))
            {
                visible.Add(pair.Value);
            }
        }
    }

    // BUG FIX: the array was previously allocated at the FULL item count
    // (outside the lock, racing concurrent mutation) and only partially
    // filled, so items hidden from the user left null entries that were
    // serialized into the response. Size it to exactly the visible items.
    reply.data = visible.ToArray();

    return encoder.Response(200, "OK").SendJsonFromObject(reply);
}
/// <summary>
/// Handles committing a device configuration. Requires an authenticated admin
/// user; the configuration is written to config_&lt;deviceid&gt;.data under the
/// configured config path.
/// </summary>
/// <param name="shandler">Server state holding config_path.</param>
/// <param name="request">The HTTP request.</param>
/// <param name="body">Request body containing the authentication message and payload.</param>
/// <param name="encoder">Encoder used to emit the JSON response.</param>
/// <returns>A completed task once the response has been written.</returns>
public static async Task<Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
{
    var auth = await Helpers.ReadMessageFromStreamAndAuthenticate(shandler, 1024 * 16, body);

    if (!auth.success)
    {
        await encoder.WriteQuickHeader(403, "Must be authenticated.");
        await encoder.BodyWriteSingleChunk(JsonConvert.SerializeObject(new CommitConfigurationResponse()
        {
            success = false,
        }));
        return Task.CompletedTask;
    }

    if (!auth.user.admin)
    {
        await encoder.WriteQuickHeader(403, "Must be admin.");
        await encoder.BodyWriteSingleChunk(JsonConvert.SerializeObject(new CommitConfigurationResponse()
        {
            success = false,
        }));
        return Task.CompletedTask;
    }

    var req = JsonConvert.DeserializeObject<CommitConfigurationRequest>(auth.payload);

    var file_data = new ConfigFileData()
    {
        userid = req.userid,
        config_data = req.config_data,
    };

    // TODO: *think* reliable operation and atomic as possible
    var config_bytes_utf8 = Encoding.UTF8.GetBytes(
        JsonConvert.SerializeObject(file_data)
    );

    // BUG FIX: File.OpenWrite does not truncate an existing file, so writing a
    // payload shorter than the previous one left stale trailing bytes and
    // corrupted the stored JSON. FileMode.Create truncates. The using-block
    // also guarantees the handle is released even if WriteAsync throws.
    var config_file_path = Path.Combine(shandler.config_path, $"config_{req.deviceid}.data");
    using (var fp = new FileStream(config_file_path, FileMode.Create, FileAccess.Write))
    {
        await fp.WriteAsync(config_bytes_utf8, 0, config_bytes_utf8.Length);
    }

    await encoder.WriteQuickHeader(200, "OK");
    await encoder.BodyWriteSingleChunk(JsonConvert.SerializeObject(new CommitConfigurationResponse()
    {
        success = true,
    }));
    return Task.CompletedTask;
}
/// <summary>
/// Handles enumerating all stored device configurations. Requires
/// authentication. Returns a map of device id to raw configuration text, where
/// the id is extracted from filenames of the form config_&lt;id&gt;.data.
/// </summary>
/// <param name="shandler">Server state holding config_path.</param>
/// <param name="request">The HTTP request.</param>
/// <param name="body">Request body containing the authentication message.</param>
/// <param name="encoder">Encoder used to emit the JSON response.</param>
/// <returns>The encoder's send task.</returns>
public static async Task<Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
{
    var auth = await Helpers.ReadMessageFromStreamAndAuthenticate(shandler, 1024 * 16, body);

    if (!auth.success)
    {
        return encoder.Response(403, "Denied").SendNothing();
    }

    var resp = new EnumerateConfigurationsResponse();

    resp.success = true;
    resp.configs = new Dictionary<string, string>();

    try
    {
        foreach (var node in Directory.EnumerateFiles(shandler.config_path))
        {
            // BUG FIX: EnumerateFiles already yields paths rooted at config_path.
            // The old code combined the result with data_path — the wrong
            // directory — and only worked because Path.Combine discards the
            // first argument when the second is absolute.
            var fnode = node;
            var fnode_filename = Path.GetFileName(fnode);

            if (fnode_filename.StartsWith("config_") && fnode_filename.EndsWith(".data"))
            {
                byte[] buf;

                // BUG FIX: the stream was leaked if a read threw; the loop could
                // also spin forever on a zero-byte read (e.g. file truncated
                // mid-read). The using-block and the break guard fix both.
                using (var fd = File.OpenRead(fnode))
                {
                    buf = new byte[fd.Length];
                    int cnt = 0;
                    while (cnt < buf.Length)
                    {
                        var got = await fd.ReadAsync(buf, cnt, buf.Length - cnt);
                        if (got < 1)
                        {
                            break;
                        }
                        cnt += got;
                    }
                }

                var buf_text = Encoding.UTF8.GetString(buf);

                // The device id sits between the "config_" prefix and ".data" suffix.
                var id = fnode_filename.Substring(fnode_filename.IndexOf("_") + 1);
                id = id.Substring(0, id.LastIndexOf("."));

                resp.configs[id] = buf_text;
            }
        }
    }
    catch (Exception)
    {
        //Logger.WriteCriticalString($"Error during configuration enumeration as follows:\n{ex}");
        return encoder.Response(500, "Error").SendNothing();
    }

    return encoder.Response(200, "OK").SendJsonFromObject(resp);
}
/// <summary>
/// Handles deleting existing data files. Requires authentication and the
/// can_delete privilege. The backing file is removed, the accounted space is
/// released, and the item is journaled with a null fqpath as a tombstone.
/// </summary>
/// <param name="shandler">Server state holding the item dictionary and journal.</param>
/// <param name="request">The HTTP request.</param>
/// <param name="body">Request body containing the authentication message and payload.</param>
/// <param name="encoder">Encoder used to emit the JSON response.</param>
/// <returns>A completed task once the response has been written.</returns>
public static async Task<Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
{
    var auth_resp = await Helpers.ReadMessageFromStreamAndAuthenticate(shandler, 1024 * 16, body);

    if (!auth_resp.success)
    {
        return encoder.Response(403, "Denied").SendNothing();
    }

    if (!auth_resp.user.can_delete)
    {
        return encoder.Response(403, "Denied").SendNothing();
    }

    var sreq = JsonConvert.DeserializeObject<DeleteRequest>(auth_resp.payload);
    var sid = sreq.sid;

    // BUG FIX: the item dictionary was previously read without the lock the
    // other handlers use, racing concurrent mutation; TryGetValue under the
    // lock also avoids the ContainsKey + indexer double lookup.
    Item item = null;
    lock (shandler.items)
    {
        shandler.items.TryGetValue(sid, out item);
    }

    if (item != null)
    {
        try
        {
            File.Delete(item.fqpath);
        }
        catch (Exception)
        {
            await encoder.WriteQuickHeaderAndStringBody(
                500, "Error", JsonConvert.SerializeObject(new DeleteResponse()
                {
                    success = false,
                })
            );
            return Task.CompletedTask;
        }

        // Only release accounted space when the item actually had a backing file.
        if (item.fqpath != null && item.fqpath.Length > 0)
        {
            shandler.UsedSpaceSubtract((long)item.datasize);
        }

        // Null fqpath marks the item as deleted in the journal.
        item.fqpath = null;
        await shandler.WriteItemToJournal(item);
    }

    await encoder.WriteQuickHeaderAndStringBody(
        200, "Deleted", JsonConvert.SerializeObject(new DeleteResponse()
        {
            success = true,
        })
    );
    return Task.CompletedTask;
}
/// <summary>
/// Serves the application index page (index.html) from the embedded webres
/// resources.
/// </summary>
/// <param name="state">Pass-through server state.</param>
/// <param name="request">Pass-through HTTP request.</param>
/// <param name="body">Request body; drained before the static resource is sent.</param>
/// <param name="encoder">Pass-through response encoder.</param>
/// <returns>The completion task produced by the static-resource route.</returns>
public static async Task<Task> Index(
    ServerHandler state,
    HTTPRequest request,
    Stream body,
    IProxyHTTPEncoder encoder)
{
    // The index route carries no useful body; discard it before responding.
    await Util.ReadStreamUntilEndAndDiscardDataAsync(body);

    var completion = await StaticRoute("index.html", state, request, body, encoder);
    return completion;
}
/// <summary>
/// Serves an embedded webres resource named by the request's query string.
/// </summary>
/// <param name="state">Pass-through server state.</param>
/// <param name="request">HTTP request; its query string selects the resource.</param>
/// <param name="body">Request body; drained before the static resource is sent.</param>
/// <param name="encoder">Pass-through response encoder.</param>
/// <returns>The completion task produced by the static-resource route.</returns>
public static async Task<Task> Utility(
    ServerHandler state,
    HTTPRequest request,
    Stream body,
    IProxyHTTPEncoder encoder)
{
    // No meaningful body is expected on this route; discard whatever arrived.
    await Util.ReadStreamUntilEndAndDiscardDataAsync(body);

    // The query string doubles as the webres resource name.
    var resourceName = request.query_string;
    var completion = await StaticRoute(resourceName, state, request, body, encoder);
    return completion;
}
/// <summary>
/// Returns the minimal client bootstrap configuration: the database URL
/// (relative, ".") and the authentication service URL from server state.
/// </summary>
/// <param name="shandler">Server state supplying auth_url.</param>
/// <param name="request">The HTTP request (unused).</param>
/// <param name="body">Request body; drained and discarded.</param>
/// <param name="encoder">Encoder used to emit the JSON response.</param>
/// <returns>A completed task once the response has been written.</returns>
public static async Task<Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
{
    await MDACS.Server.Util.ReadStreamUntilEndAndDiscardDataAsync(body);

    // Build the response object with an index initializer instead of
    // statement-by-statement assignment.
    var resp = new JObject
    {
        ["dbUrl"] = ".",
        ["authUrl"] = shandler.auth_url,
    };

    await encoder.WriteQuickHeader(200, "OK");
    await encoder.BodyWriteSingleChunk(JsonConvert.SerializeObject(resp));
    return Task.CompletedTask;
}
/// <summary>
/// Handles returning information about space/bytes usage of the database.
/// Requires authentication; replies with used and maximum byte counts.
/// </summary>
/// <param name="shandler">Server state supplying the space counters.</param>
/// <param name="request">The HTTP request (unused).</param>
/// <param name="body">Request body containing the authentication message.</param>
/// <param name="encoder">Encoder used to emit the JSON response.</param>
/// <returns>A completed task once the response has been written.</returns>
/// <exception cref="UnauthorizedException">Thrown when authentication fails.</exception>
public static async Task<Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
{
    var auth_resp = await Helpers.ReadMessageFromStreamAndAuthenticate(shandler, 1024 * 16, body);

    if (!auth_resp.success)
    {
        throw new UnauthorizedException();
    }

    // Assemble the reply via an index initializer rather than field-by-field
    // assignment; the shape of the JSON is unchanged.
    var resp = new JObject
    {
        ["success"] = true,
        ["used_bytes"] = shandler.GetUsedSpace(),
        ["max_bytes"] = shandler.GetMaxSpace(),
    };

    await encoder.WriteQuickHeader(200, "OK");
    await encoder.BodyWriteSingleChunk(JsonConvert.SerializeObject(resp));
    return Task.CompletedTask;
}
/// <summary>
/// Handles the commitset route: applies a set of metadata field changes to a
/// single item (looked up by security_id), after checking both item-level and
/// per-field authorization, then persists the item to the journal.
/// </summary>
/// <param name="shandler">Server state holding the item dictionary and journal.</param>
/// <param name="request">The HTTP request.</param>
/// <param name="body">Request body containing the authentication message and payload.</param>
/// <param name="encoder">Encoder used to emit the response.</param>
/// <exception cref="UnauthorizedException">User has insufficient priviledges to modify the item.</exception>
/// <exception cref="AuthenticationException">User is not valid for access of any type.</exception>
/// <exception cref="InvalidArgumentException">One of the arguments was not correct or the reason for failure.</exception>
/// <exception cref="ProgramException">Anything properly handled but needs handling for acknowlegement purposes.</exception>
/// <exception cref="Exception">Anything else could result in instability.</exception>
public static async Task<Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
{
    var auth_resp = await Helpers.ReadMessageFromStreamAndAuthenticate(shandler, 1024 * 16, body);

    if (!auth_resp.success)
    {
        throw new UnauthorizedException();
    }

    var sreq = JsonConvert.DeserializeObject<API.Requests.CommitSetRequest>(auth_resp.payload);

    // NOTE(review): Monitor.Enter is paired with Monitor.Exit in the finally
    // below, but the try-block awaits (WriteQuickHeader/BodyWriteSingleChunk);
    // Monitor is thread-affine, so resuming on a different thread would make
    // the Exit throw SynchronizationLockException — confirm the encoder's
    // awaits complete synchronously here or restructure to respond after Exit.
    Monitor.Enter(shandler.items);

    Item item;
#if DEBUG
    //Logger.WriteDebugString($"sreq.security_id={sreq.security_id}");
#endif
    try
    {
        if (!shandler.items.ContainsKey(sreq.security_id))
        {
            await encoder.WriteQuickHeader(404, "Not Found");
            await encoder.BodyWriteSingleChunk("");
            return Task.CompletedTask;
        }

        item = shandler.items[sreq.security_id];
    }
    catch (Exception ex)
    {
#if DEBUG
        //Logger.WriteDebugString($"Exception on getting item was:\n{ex}");
#endif
        await encoder.WriteQuickHeader(500, "Error");
        await encoder.BodyWriteSingleChunk("");
        return Task.CompletedTask;
    }
    finally
    {
        // Released on every path out of the lookup above.
        Monitor.Exit(shandler.items);
    }

    // Item-level authorization: can this user touch this item at all?
    if (!Helpers.CanUserModifyItem(auth_resp.user, item))
    {
#if DEBUG
        //Logger.WriteDebugString($"User was not authorized to write to item.");
#endif
        await encoder.WriteQuickHeader(403, "Not Authorized");
        await encoder.BodyWriteSingleChunk("");
        return Task.CompletedTask;
    }

    // Check fields to see if the user is authorized to modify them.
    // All-or-nothing: reject the whole request if any single field is denied.
    foreach (var pair in sreq.meta)
    {
        if (!await shandler.FieldModificationValidForUser(auth_resp.user, pair.Key))
        {
            await encoder.WriteQuickHeader(403, "Not Authorized");
            await encoder.BodyWriteSingleChunk("");
            return Task.CompletedTask;
        }
    }

    try
    {
        foreach (var pair in sreq.meta)
        {
            // Reflection simplified coding time at the expense of performance.
            var field = item.GetType().GetField(pair.Key);
            field.SetValue(item, pair.Value.ToObject(field.FieldType));
#if DEBUG
            //Logger.WriteDebugString($"Set field {field} of {sreq.meta} to {pair.Value.ToString()}.");
#endif
        }

        // NOTE(review): this write-back and the field mutation above happen
        // outside the items lock — confirm that is acceptable for concurrent
        // commitset requests against the same item.
        shandler.items[sreq.security_id] = item;

        if (!await shandler.WriteItemToJournal(item))
        {
#if DEBUG
            //Logger.WriteDebugString($"Error happened when writing to the journal for a commit set operation.");
#endif
            await encoder.WriteQuickHeader(500, "Error");
            await encoder.BodyWriteSingleChunk("");
            return Task.CompletedTask;
        }
    }
    catch (Exception)
    {
#if DEBUG
        //Logger.WriteDebugString($"Error happened when writing to journal or setting item fields during commit set operation.");
#endif
        await encoder.WriteQuickHeader(500, "Error");
        await encoder.BodyWriteSingleChunk("");
        return Task.CompletedTask;
    }

    var resp = new MDACS.API.Responses.CommitSetResponse();

    resp.success = true;

    await encoder.WriteQuickHeader(200, "OK");
    await encoder.BodyWriteSingleChunk(JsonConvert.SerializeObject(resp));
    return Task.CompletedTask;
}
/// <summary>
/// Reads the full request body (up to max_size bytes) as a UTF-8 message and
/// forwards it to the authentication service for verification.
/// </summary>
/// <param name="shandler">Server state supplying the auth service URL.</param>
/// <param name="max_size">Maximum number of payload bytes to read; anything beyond is not consumed.</param>
/// <param name="input_stream">The request body stream.</param>
/// <returns>The authentication service's response for the message.</returns>
public static async Task<API.Responses.AuthCheckResponse> ReadMessageFromStreamAndAuthenticate(ServerHandler shandler, int max_size, Stream input_stream)
{
    // BUG FIX: max_size was previously ignored and a fixed 32 KiB buffer was
    // used regardless of the limit the caller requested.
    var buf = new byte[max_size];
    int pos = 0;

    Debug.WriteLine("Reading authenticated payload.");

    // Accumulate until the buffer is full or the stream ends; a single
    // ReadAsync call may return fewer bytes than requested.
    while (pos < buf.Length)
    {
        var cnt = await input_stream.ReadAsync(buf, pos, buf.Length - pos);

        if (cnt < 1)
        {
            break;
        }

        pos += cnt;
    }

    Debug.WriteLine("Done reading authenticated payload.");

    var buf_utf8_string = Encoding.UTF8.GetString(buf, 0, pos);

    Debug.WriteLine(buf_utf8_string);

    var resp = await MDACS.API.Auth.AuthenticateMessageAsync(
        shandler.auth_url,
        buf_utf8_string
    );

    Debug.WriteLine("Handing back result from authenticated payload.");

    return resp;
}
/// <summary>
/// Handles a device configuration fetch. If no configuration exists for the
/// device yet, the device's own reported configuration is persisted as the
/// initial one. The stored config is then returned with its nested
/// config_data pretty-printed and CRLF-normalized for display.
/// </summary>
/// <param name="shandler">Server state holding config_path.</param>
/// <param name="request">The HTTP request.</param>
/// <param name="body">Request body: an unauthenticated DeviceConfigRequest JSON document.</param>
/// <param name="encoder">Encoder used to emit the JSON response.</param>
/// <returns>A completed task once the response has been written.</returns>
public static async Task<Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
{
    // Read the request body into a fixed buffer. NOTE(review): requests larger
    // than 4 KiB are silently truncated (the read loop just stops) — confirm
    // device requests never exceed this.
    var buf = new byte[4096];
    int ndx = 0;
    int cnt;

    while ((cnt = await body.ReadAsync(buf, ndx, buf.Length - ndx)) > 0)
    {
        ndx += cnt;
    }

    var buf_utf8 = Encoding.UTF8.GetString(buf, 0, ndx);
    //Logger.WriteDebugString($"buf_utf8={buf_utf8}");
    var req = JsonConvert.DeserializeObject<DeviceConfigRequest>(buf_utf8);
    var path = Path.Combine(shandler.config_path, $"config_{req.deviceid}.data");

    if (!File.Exists(path))
    {
        // First contact from this device: persist its reported configuration
        // as the stored one, with no owning user yet.
        var _tmp = new JObject();
        _tmp["userid"] = null;
        _tmp["config_data"] = req.current_config_data;
        var _config_bytes_utf8 = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(_tmp));

        // using-block guarantees the handle is released even if the write throws.
        using (var _fp = File.OpenWrite(path))
        {
            await _fp.WriteAsync(_config_bytes_utf8, 0, _config_bytes_utf8.Length);
        }
    }

    //Logger.WriteDebugString($"The config path is {path}.");

    byte[] config_bytes_utf8;
    using (var fp = File.OpenRead(path))
    {
        config_bytes_utf8 = new byte[fp.Length];

        // BUG FIX: a single ReadAsync call may return fewer bytes than
        // requested, which previously could truncate the config; loop until
        // the whole file has been read.
        int read = 0;
        while (read < config_bytes_utf8.Length)
        {
            var got = await fp.ReadAsync(config_bytes_utf8, read, config_bytes_utf8.Length - read);
            if (got < 1)
            {
                break;
            }
            read += got;
        }
    }

    var config_data = Encoding.UTF8.GetString(config_bytes_utf8);

    JObject tmp = JsonConvert.DeserializeObject<JObject>(config_data);

    // Pretty-print the nested config JSON and normalize line endings to CRLF
    // for display on the device/client side.
    tmp["config_data"] = JsonConvert.SerializeObject(
        JsonConvert.DeserializeObject<JObject>(tmp["config_data"].Value<string>()),
        Formatting.Indented
    );
    tmp["config_data"] = tmp["config_data"].Value<string>().Replace("\n", "\r\n");

    var resp = new DeviceConfigResponse();
    resp.success = true;
    resp.config_data = JsonConvert.SerializeObject(tmp);

    Debug.WriteLine($"@@@@@ {resp.config_data}");

    await encoder.WriteQuickHeader(200, "OK");
    await encoder.BodyWriteSingleChunk(JsonConvert.SerializeObject(resp));
    return Task.CompletedTask;
}
/// <summary>
/// Program entry point. Loads (or scaffolds) the JSON configuration, builds
/// the server handler and the URL route table, then runs the HTTP server
/// until it exits.
/// </summary>
/// <param name="args">args[0] must be the path to the JSON configuration file.</param>
public static void Main(string[] args)
{
    if (args.Length < 1)
    {
        Console.WriteLine("Provide path or file that contains the JSON configuration. If file does not exit then default one will be created.");
        return;
    }

    // Attach a handler for logger events.
    //Logger.handler_event += LoggerOutput;

    if (!File.Exists(args[0]))
    {
        // Scaffold a template configuration the operator can fill in; each
        // field's value documents what belongs there.
        ProgramConfig defcfg = new ProgramConfig
        {
            metajournal_path = "The file path to the metadata journal.",
            data_path = "The path to the directory containing the data files backing the journal.",
            config_path = "The path to the directory holding device configuration files.",
            auth_url = "The HTTP or HTTPS URL to the authentication service.",
            ssl_cert_path = "The PFX file that contains both the private and public keys for communications.",
            universal_records_key_path = "The PFX file for the universal records system.",
            notification_url = "Use null or the URL for the notification service.",
            port = 34001,
        };

        // BUG FIX: the writer was previously leaked if serialization threw;
        // the using-block guarantees disposal (and a flush) on every path.
        using (var defcfgfp = File.CreateText(args[0]))
        {
            defcfgfp.Write(JsonConvert.SerializeObject(defcfg, Formatting.Indented));
        }

        Console.WriteLine("Default configuration created at location specified.");
        return;
    }

    ProgramConfig cfg;
    using (var cfgfp = File.OpenText(args[0]))
    {
        cfg = JsonConvert.DeserializeObject<ProgramConfig>(cfgfp.ReadToEnd());
    }

    var handler = new ServerHandler(
        metajournal_path: cfg.metajournal_path,
        data_path: cfg.data_path,
        config_path: cfg.config_path,
        auth_url: cfg.auth_url,
        cluster_size: 4096,
        max_storage_space: (long)1024 * 1024 * 1024 * 680,
        universal_records_key_path: cfg.universal_records_key_path,
        universal_records_key_pass: cfg.universal_records_key_pass,
        universal_records_url: cfg.universal_records_url,
        notification_post_url: cfg.notification_url
    );

    // URL route table: path -> handler delegate.
    var handlers = new Dictionary<String, SimpleServer<ServerHandler>.SimpleHTTPHandler>();

    handlers.Add("/upload", HandleUpload.Action);
    handlers.Add("/device-config", HandleDeviceConfig.Action);
    handlers.Add("/commit_batch_single_ops", HandleBatchSingleOps.Action);
    handlers.Add("/download", HandleDownload.Action);
    handlers.Add("/enumerate-configurations", HandleEnumerateConfigurations.Action);
    handlers.Add("/data", HandleData.Action);
    handlers.Add("/commitset", HandleCommitSet.Action);
    handlers.Add("/commit-configuration", HandleCommitConfiguration.Action);
    handlers.Add("/delete", HandleDelete.Action);
    handlers.Add("/spaceinfo", HandleSpaceInfo.Action);
    handlers.Add("/version", HandleVersion.Action);
    handlers.Add("/", HandleLocalWebRes.Index);
    handlers.Add("/utility", HandleLocalWebRes.Utility);
    handlers.Add("/get-config", HandleConfigRequest.Action);

    var server = SimpleServer<ServerHandler>.Create(
        handler,
        handlers,
        cfg.port,
        cfg.ssl_cert_path,
        cfg.ssl_cert_pass
    );

    // Run the server on a dedicated thread and block until it exits.
    var a = new Thread(() =>
    {
        server.Wait();
    });

    a.Start();
    a.Join();

    // Please do not let me forget this convulted retarded sequence to get from PEM to PFX with the private key.
    // openssl crl2pkcs7 -nocrl -inkey privkey.pem -certfile fullchain.pem -out test.p7b
    // openssl pkcs7 -print_certs -in test.p7b -out test.cer
    // openssl pkcs12 -export -in test.cer -inkey privkey.pem -out test.pfx -nodes
    // THEN... for Windows, at least, import into cert store, then export with private key and password.
    // FINALLY... use the key now and make sure its X509Certificate2.. notice the 2 on the end? Yep.
}
/// <summary>
/// Handles a batch of single field-set operations. All field names are
/// permission-checked up front (all-or-nothing); each operation is then
/// applied under the items lock and journaled. Operations whose item or field
/// cannot be resolved are collected and reported back in the failed list.
/// </summary>
/// <param name="shandler">Server state holding the item dictionary and journal.</param>
/// <param name="request">The HTTP request.</param>
/// <param name="body">Request body containing the authentication message and payload.</param>
/// <param name="encoder">Encoder used to emit the JSON response.</param>
/// <returns>A completed task once the response has been written.</returns>
public static async Task<Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
{
    var auth = await Helpers.ReadMessageFromStreamAndAuthenticate(shandler, 1024 * 16, body);

    if (!auth.success)
    {
        return encoder.Response(403, "Denied").SendNothing();
    }

    var req = JsonConvert.DeserializeObject<HandleBatchSingleOpsRequest>(auth.payload);

    var failed = new List<BatchSingleOp>();
    var tasks = new List<Task<bool>>();

    // Validate every field first so the batch is all-or-nothing with respect
    // to permissions: one denied field rejects the entire request.
    foreach (var op in req.ops)
    {
        var field_name = op.field_name;

        if (!await shandler.FieldModificationValidForUser(auth.user, field_name))
        {
            return encoder.Response(403, $"Denied Change On Field {field_name}").SendNothing();
        }
    }

    foreach (var op in req.ops)
    {
        var sid = op.sid;
        var field_name = op.field_name;
        var value = op.value;

        lock (shandler.items)
        {
            if (shandler.items.ContainsKey(sid))
            {
                try
                {
                    // Reflection keeps this generic over item fields at the
                    // cost of per-op lookup overhead.
                    var tmp = shandler.items[sid];
                    var field = tmp.GetType().GetField(field_name);
                    field.SetValue(tmp, value.ToObject(field.FieldType));
                    tasks.Add(shandler.WriteItemToJournal(tmp));
                }
                catch (Exception)
                {
                    //Logger.WriteDebugString(
                    //    $"Failed during batch single operation. The SID was {sid}. The field name was {field_name}. The value was {value}."
                    //);
                    failed.Add(new BatchSingleOp()
                    {
                        field_name = field_name,
                        sid = sid,
                        value = value,
                    });
                }
            }
            else
            {
                failed.Add(new BatchSingleOp()
                {
                    field_name = field_name,
                    sid = sid,
                    value = value,
                });
            }
        }
    }

    // BUG FIX: Task.WaitAll blocked a thread-pool thread inside an async
    // method (sync-over-async, deadlock/starvation risk); await the journal
    // writes asynchronously instead.
    await Task.WhenAll(tasks);

    var resp = new HandleBatchSingleOpsResponse();

    resp.success = true;
    resp.failed = failed.ToArray();

    await encoder.Response(200, "OK")
        .CacheControlDoNotCache()
        .SendJsonFromObject(resp);

    return Task.CompletedTask;
}
/// <summary>
/// Handles reading header and data for data upload. A critical routine that employs as many checks as needed
/// to ensure that written data is verified as written and correct. The body starts with a newline-terminated
/// JSON auth/header line followed by the raw file data, which is hashed (SHA-512) while being written to a
/// temporary file, then moved into its final location and journaled.
/// </summary>
/// <param name="shandler">Server state holding data_path, the journal, and space accounting.</param>
/// <param name="request">The HTTP request.</param>
/// <param name="body">Request body: one header line, then the file payload.</param>
/// <param name="encoder">Encoder used to emit the JSON response.</param>
/// <returns>A completed task once the response has been written.</returns>
public static async Task<Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
{
    // Read until the first '\n' is seen: everything before it is the JSON header.
    var buf = new byte[1024 * 32];
    int bufndx = 0;
    int cnt;
    int tndx;

    do
    {
        cnt = await body.ReadAsync(buf, bufndx, buf.Length - bufndx);

        if (cnt > 0)
        {
            bufndx += cnt;
        }

        tndx = Array.IndexOf(buf, (byte)'\n');

        if (bufndx >= buf.Length && tndx < 0)
        {
            // NOTE(review): message says 4096 bytes but the buffer is 32 KiB — stale text.
            throw new ProgramException("On receiving upload header. The header size exceeded 4096-bytes.");
        }
    } while (cnt > 0 && tndx < 0);

    var hdrstr = Encoding.UTF8.GetString(buf, 0, tndx).Trim();

    Debug.WriteLine(hdrstr);

    var auth_package = JsonConvert.DeserializeObject<MDACS.API.Auth.Msg>(hdrstr);

    var payload = auth_package.payload;

    // NOTE(review): authentication of the upload header is disabled here — the
    // payload is trusted without verification. Confirm this is intentional.
    /*var info = await MDACS.API.Auth.AuthenticateMessageAsync(shandler.auth_url, auth_package);
     *
     * if (!info.success)
     * {
     * throw new UnauthorizedException();
     * }
     */

    var hdr = JsonConvert.DeserializeObject<MDACS.API.Requests.UploadHeader>(payload);

    // Shift the bytes read past the newline (start of the file data) to the
    // front of the buffer so they can be written out first.
    Array.Copy(buf, tndx + 1, buf, 0, bufndx - (tndx + 1));

    // Quasi-repurpose the variable `bufndx` to mark end of the slack data.
    bufndx = bufndx - (tndx + 1);

    // Final data node name: date_user_device_time.type
    var data_node = String.Format("{0}_{1}_{2}_{3}.{4}",
        hdr.datestr,
        hdr.userstr,
        hdr.devicestr,
        hdr.timestr,
        hdr.datatype
    );

    var data_node_path = Path.Combine(shandler.data_path, data_node);

    // Make the name unique and keep pertinent information in the event something fails.
    var temp_data_node_path = Path.Combine(
        shandler.data_path,
        $"temp_{DateTime.Now.ToFileTime().ToString()}_{data_node}"
    );

    // TODO: hash data then rehash data after writing to storage, maybe?
    SHA512 hasher;
    var fhash_sha512 = new byte[512 / 8];

    FileStream fp = null;

    try
    {
#if DEBUG
        //Logger.WriteDebugString($"Opening {temp_data_node_path} as temporary output for upload.");
#endif
        fp = File.Open(temp_data_node_path, FileMode.Create);

        // Write the slack data (file bytes that arrived with the header read).
        // NOTE(review): these bytes are NOT folded into the SHA-512 below —
        // confirm whether the hash is meant to cover the whole file.
        await fp.WriteAsync(buf, 0, bufndx);

        long total = 0;

        hasher = SHA512Managed.Create();

        hasher.Initialize();

        // Stream the remainder of the body to disk, hashing incrementally.
        while (true)
        {
            var _cnt = await body.ReadAsync(buf, 0, buf.Length);

            if (_cnt < 1)
            {
                break;
            }

            Debug.WriteLine($"buf={buf} _cnt={_cnt}");

            // https://stackoverflow.com/questions/20634827/how-to-compute-hash-of-a-large-file-chunk
            hasher.TransformBlock(buf, 0, _cnt, null, 0);

            total += _cnt;

            await fp.WriteAsync(buf, 0, _cnt);
        }

        hasher.TransformFinalBlock(buf, 0, 0);

        fhash_sha512 = hasher.Hash;
#if DEBUG
        //Logger.WriteDebugString($"Wrote {total} bytes to {temp_data_node_path} as temporary output for upload.");
#endif
        // NOTE(review): the body has already been drained by the loop above, so
        // this CopyToAsync is expected to be a no-op.
        await body.CopyToAsync(fp);
        await fp.FlushAsync();
        fp.Dispose();
    }
    catch (Exception ex)
    {
        if (fp != null)
        {
            fp.Dispose();
        }
#if DEBUG
        //Logger.WriteDebugString($"Exception on {temp_data_node_path} with:\n{ex}");
#endif
        File.Delete(temp_data_node_path);
        await encoder.WriteQuickHeader(500, "Problem");
        await encoder.BodyWriteSingleChunk("Problem during write to file from body stream.");
        return Task.CompletedTask;
    }
#if DEBUG
    //Logger.WriteDebugString($"Upload for {temp_data_node_path} is done.");
#endif
    // Verify the on-disk size matches what the header promised before committing.
    if (!await WaitForFileSizeMatch(temp_data_node_path, (long)hdr.datasize, 3))
    {
        File.Delete(temp_data_node_path);
        await encoder.WriteQuickHeader(504, "Timeout");
        await encoder.BodyWriteSingleChunk("The upload byte length of the destination never reached the intended stream size.");
        return Task.CompletedTask;
    }

    try
    {
        // If a file already exists at the final path, preserve it under a
        // timestamped ".moved" name before moving the new data into place.
        if (File.Exists(data_node_path))
        {
            await CheckedFileMoveAsync(data_node_path, $"{data_node_path}.moved.{DateTime.Now.ToFileTime().ToString()}");
        }

        await CheckedFileMoveAsync(temp_data_node_path, data_node_path);
    }
    catch (Exception)
    {
        // Delete the temporary since we should have saved the original.
        File.Delete(temp_data_node_path);
        // Move the original back to the original filename.
        await CheckedFileMoveAsync($"{data_node_path}.moved.{DateTime.Now.ToFileTime().ToString()}", data_node_path);
        await encoder.WriteQuickHeader(500, "Problem");
        await encoder.BodyWriteSingleChunk("Unable to do a CheckFileMoveAsync.");
        return Task.CompletedTask;
    }

    if (!await WaitForFileSizeMatch(data_node_path, (long)hdr.datasize, 3))
    {
        await encoder.WriteQuickHeader(504, "Timeout");
        await encoder.BodyWriteSingleChunk("Timeout waiting for size change.");
        return Task.CompletedTask;
    }

    // Build the journal item: the security id is the SHA-512 of the node name.
    Item item = new Item();

    hasher = new SHA512Managed();

    var security_id_bytes = hasher.ComputeHash(Encoding.UTF8.GetBytes(data_node));

    item.datasize = hdr.datasize;
    item.datatype = hdr.datatype;
    item.datestr = hdr.datestr;
    item.devicestr = hdr.devicestr;
    item.duration = -1.0;
    item.fqpath = data_node_path;
    item.metatime = DateTime.Now.ToFileTimeUtc();
    item.node = data_node;
    item.note = "";
    item.security_id = BitConverter.ToString(security_id_bytes).Replace("-", "").ToLower();
    item.timestr = hdr.timestr;
    item.userstr = hdr.userstr;
    item.state = "";
    item.manager_uuid = shandler.manager_uuid;
    item.data_hash_sha512 = Convert.ToBase64String(fhash_sha512);
    item.duration = MDACS.Database.MediaTools.MP4Info.GetDuration(item.fqpath);

    await shandler.WriteItemToJournal(item);

    var uresponse = new MDACS.API.Responses.UploadResponse();

    uresponse.success = true;
    uresponse.fqpath = item.fqpath;
    uresponse.security_id = item.security_id;

    shandler.UsedSpaceAdd((long)hdr.datasize);

    await encoder.WriteQuickHeader(200, "OK");
    await encoder.BodyWriteSingleChunk(JsonConvert.SerializeObject(uresponse));

    // Allow current execution to continue while spinning off the
    // a handler for this successful upload.
    shandler.HouseworkAfterUploadSuccess(item);

    return Task.CompletedTask;
}
/// <summary>
/// Handles downloading an item's backing file by security id (passed as the
/// query string). Supports HTTP Range requests (single "bytes=start-end"
/// range) and responds 206 Partial Content when a range is given.
/// </summary>
/// <param name="shandler">Server state holding the item dictionary and data_path.</param>
/// <param name="request">The HTTP request; query_string carries the security id.</param>
/// <param name="body">Request body (unused).</param>
/// <param name="encoder">Encoder used to stream the file.</param>
/// <returns>A completed task once the stream has been written.</returns>
/// <exception cref="InvalidArgumentException">No security id given, or it matches no item.</exception>
static public async Task<Task> Action(ServerHandler shandler, HTTPRequest request, Stream body, IProxyHTTPEncoder encoder)
{
    // This URL route was never implemented with user security. The security ID itself is the security, but that
    // could be revised.
    //var auth = await ReadMessageFromStreamAndAuthenticate(1024 * 16, body);
    //if (!auth.success)
    //{
    //    throw new UnauthorizedException();
    //}

    String download_sid;

    if (request.query_string.Length > 0)
    {
        download_sid = request.query_string;
    }
    else
    {
        //req = JsonConvert.DeserializeObject<HandleDownloadRequest>(auth.payload);
        //download_sid = req.security_id;
        throw new InvalidArgumentException();
    }

    Item item;

    // Item lookup is done under the shared items lock.
    lock (shandler.items)
    {
        if (!shandler.items.ContainsKey(download_sid))
        {
            throw new InvalidArgumentException();
        }

        item = shandler.items[download_sid];
    }

    var item_data_path = Path.Combine(shandler.data_path, item.node);

    var fd = File.OpenRead(item_data_path);

    ulong offset_start = 0;
    // How can a stream be negative? Could this stream ever be negative? What purpose does it suit?
    ulong offset_size = (ulong)fd.Length;
    ulong total_size = (ulong)fd.Length;

    String response_code = "200";

    // Parse a single "range: bytes=start-end" header if present; an open-ended
    // range ("start-") serves from start to end of file.
    if (request.internal_headers.ContainsKey("range"))
    {
        var range_str = request.internal_headers["range"];
        var eqndx = range_str.IndexOf("=");

        if (eqndx > -1)
        {
            var range_sub_str = range_str.Substring(eqndx + 1).Trim();
            var range_nums_strs = range_sub_str.Split("-");

            if (range_nums_strs.Length > 1)
            {
                offset_start = ulong.Parse(range_nums_strs[0]);

                if (range_nums_strs[1].Equals(""))
                {
                    offset_size = (ulong)fd.Length - offset_start;
                }
                else
                {
                    // Range end is inclusive, hence the +1.
                    offset_size = ulong.Parse(range_nums_strs[1]) - offset_start + 1;
                }

                response_code = "206";
            }
        }
    }

    // NOTE(review): this Seek is performed twice — the checked block guards the
    // ulong->long narrowing, then the same Seek is repeated; the duplicate is
    // harmless but looks unintentional.
    checked
    {
        fd.Seek((long)offset_start, SeekOrigin.Begin);
    }

    fd.Seek((long)offset_start, SeekOrigin.Begin);

    String mime_type = null;

    switch (item.datatype)
    {
        case "mp4":
            mime_type = "video/mp4";
            break;
        case "jpg":
            mime_type = "image/jpeg";
            break;
    }

    // LimitedStream caps the response at offset_size bytes of the file.
    using (var de_stream = new LimitedStream(fd, offset_size))
    {
        var header = new Dictionary<String, String>();

        header.Add("$response_code", response_code);
        header.Add("$response_text", "Partial Content");
        header.Add("content-disposition", String.Format("inline; filename=\"{0}_{1}_{2}_{3}.{4}\"",
            item.datestr,
            item.userstr,
            item.devicestr,
            item.timestr,
            item.datatype
        ));
        header.Add("accept-ranges", "bytes");
        header.Add("content-range", String.Format("bytes {0}-{1}/{2}",
            offset_start,
            offset_size + offset_start - 1,
            total_size
        ));

        if (mime_type != null)
        {
            header.Add("content-type", mime_type);
        }

        await encoder.WriteHeader(header);
        await encoder.BodyWriteStream(de_stream);

        return Task.CompletedTask;
    }
}