/// <summary>
/// On page load, queues an asynchronous batch resize of every file in the
/// sibling "Images" folder, writing the results to ~/Results/{jobId}.zip.
/// Progress is reported through the s_ItemEvent / s_JobEvent handlers.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    Response.ContentType = "text/plain";

    Guid jobId = Guid.NewGuid();
    string zipPath = MapPath("~/Results/" + jobId.ToString() + ".zip");
    BatchResizeSettings settings = new BatchResizeSettings(zipPath, jobId, new List<BatchResizeItem>());

    // Collect every file from the Images folder one level above the app root.
    string imagesDir = Path.GetFullPath(MapPath("~/").TrimEnd('\\') + "\\..\\Images");
    foreach (string img in System.IO.Directory.GetFiles(imagesDir, "*"))
    {
        settings.files.Add(new BatchResizeItem(img, null, "?width=100"));
    }

    settings.ItemEvent += new ItemCallback(s_ItemEvent);
    settings.JobEvent += new JobCallback(s_JobEvent);

    // Executes on a thread-pool thread. For synchronous execution use:
    //   new BatchResizeWorker(settings).Work();
    BatchResizeManager.BeginBatchResize(settings);
}
/// <summary>
/// Not for external use. Assumes 'state' is a BatchResizeSettings instance and stores it
/// in a class member, then completes (or fails at) the job. Throws exceptions only if
/// (a) state is not a BatchResizeSettings instance, or (b) an exception is thrown by any
/// handler of the JobEvent (Failed) event.
/// </summary>
/// <param name="state">Must be a BatchResizeSettings instance.</param>
protected internal void Work(object state)
{
    s = (BatchResizeSettings)state;
    try
    {
        // Time the whole job.
        jobTimer = new Stopwatch();
        jobTimer.Start();

        // 1) Eliminate duplicate target filenames, sanitize, append appropriate
        //    extensions, then freeze the items.
        s.FixDuplicateFilenames();
        s.AppendFinalExtensions();
        s.MarkItemsImmutable();

        // 2) Build a dictionary keyed on target filenames.
        items = BuildDict(s.files);

        // 3) Make sure the destination folder exists (early detection of security failures).
        CreateDestinationFolder();

        // 4) Open the zip file (writes to a temporary location, moved into place on completion).
        using (Ionic.Zip.ZipFile zip = new Ionic.Zip.ZipFile(s.destinationFile))
        {
            // Favor speed over compression ratio.
            zip.CompressionLevel = Ionic.Zlib.CompressionLevel.BestSpeed;
            // Surface zip errors as events instead of exceptions.
            zip.ZipErrorAction = ZipErrorAction.InvokeErrorEvent;
            zip.ZipError += new EventHandler<ZipErrorEventArgs>(z_ZipError);
            zip.SaveProgress += new EventHandler<SaveProgressEventArgs>(z_SaveProgress);

            // 5) Collect per-item resize/compression results here.
            results = new List<ItemResult>();

            // 6) Queue each file for inclusion; WriteItemCallback supplies the bytes later.
            foreach (BatchResizeItem item in s.files)
            {
                try
                {
                    ZipEntry entry = zip.AddEntry(item.TargetFilename, new WriteDelegate(WriteItemCallback));
                    // Resized images are already compressed; zipping them again only
                    // wastes time and can grow the archive.
                    if (wouldResize(item))
                    {
                        entry.CompressionMethod = CompressionMethod.None;
                    }
                }
                catch (Exception ex)
                {
                    ItemFailed(item, ex);
                }
            }

            // 7) Process the queue; each entry's WriteDelegate provides the resized stream.
            zip.Save();
        } // 8) Zip file closed by the using block.

        jobTimer.Stop();

        // 9) Announce success.
        JobCompleted(s);
    }
    catch (Exception ex)
    {
        // Admit we failed to whoever is listening.
        JobFailed(s, ex);
    }
}
// Can initiate resizes, specifying a callback for error and progress reporting.
/// <summary>
/// Begins a batch resize operation on a thread-pool background thread.
/// If the ASP.NET process recycles, or the server reboots, the job is aborted.
/// </summary>
/// <param name="s">The settings describing the batch job; also passed as the work-item state.</param>
public static void BeginBatchResize(BatchResizeSettings s)
{
    BatchResizeWorker worker = new BatchResizeWorker(s);
    ThreadPool.QueueUserWorkItem(worker.Work, s);
}
/// <summary>
/// Creates a new worker instance for the given job settings.
/// Do not modify the settings while the Work() method is executing.
/// </summary>
/// <param name="s">The batch job settings this worker will process.</param>
public BatchResizeWorker(BatchResizeSettings s)
{
    this.s = s;
}
// Fires the JobEvent with a failed JobResult carrying the per-item results,
// the causing exception, and the job statistics gathered so far.
private void JobFailed(BatchResizeSettings s, Exception ex)
{
    JobResult failure = new JobResult(results, false, ex, GetJobStats());
    s.FireJobEvent(new JobEventArgs(s.jobId, failure));
}
// Fires the JobEvent with a successful JobResult (no exception) carrying the
// per-item results and the job statistics.
private void JobCompleted(BatchResizeSettings s)
{
    JobResult success = new JobResult(results, true, null, GetJobStats());
    s.FireJobEvent(new JobEventArgs(s.jobId, success));
}
// Notifies JobEvent listeners that the job failed, packaging the item results,
// the exception, and current job statistics into a JobResult.
private void JobFailed(BatchResizeSettings s, Exception ex)
{
    JobEventArgs args = new JobEventArgs(
        s.jobId,
        new JobResult(results, false, ex, GetJobStats()));
    s.FireJobEvent(args);
}
// Notifies JobEvent listeners that the job completed successfully, packaging the
// item results and current job statistics into a JobResult.
private void JobCompleted(BatchResizeSettings s)
{
    JobEventArgs args = new JobEventArgs(
        s.jobId,
        new JobResult(results, true, null, GetJobStats()));
    s.FireJobEvent(args);
}
/// <summary>
/// Not for external use. Assumes 'state' is a BatchResizeSettings instance and stores it
/// in a class member, then completes (or fails at) the job. Throws exceptions only if
/// (a) state is not a BatchResizeSettings instance, or (b) an exception is thrown by any
/// handler of the JobEvent (Failed) event.
/// </summary>
/// <param name="state">Must be a BatchResizeSettings instance.</param>
protected internal void Work(object state)
{
    s = (BatchResizeSettings)state;
    try
    {
        // Start timing the job.
        jobTimer = new Stopwatch();
        jobTimer.Start();

        // Step 1: deduplicate and sanitize target filenames, append final
        // extensions, then mark every item immutable.
        s.FixDuplicateFilenames();
        s.AppendFinalExtensions();
        s.MarkItemsImmutable();

        // Step 2: index the items by target filename.
        items = BuildDict(s.files);

        // Step 3: create the destination folder if needed — surfaces
        // permission problems before any work is done.
        CreateDestinationFolder();

        // Step 4: open the archive (written to a temp location, then moved when done).
        using (var archive = new Ionic.Zip.ZipFile(s.destinationFile))
        {
            archive.CompressionLevel = Ionic.Zlib.CompressionLevel.BestSpeed; // speed over ratio
            archive.ZipErrorAction = ZipErrorAction.InvokeErrorEvent;         // events, not exceptions
            archive.ZipError += new EventHandler<ZipErrorEventArgs>(z_ZipError);
            archive.SaveProgress += new EventHandler<SaveProgressEventArgs>(z_SaveProgress);

            // Step 5: fresh result list for this run.
            results = new List<ItemResult>();

            // Step 6: queue every file; the WriteDelegate supplies content at save time.
            foreach (BatchResizeItem current in s.files)
            {
                try
                {
                    ZipEntry queued = archive.AddEntry(current.TargetFilename, new WriteDelegate(WriteItemCallback));
                    // Already-resized images don't benefit from deflate — skip it.
                    if (wouldResize(current))
                    {
                        queued.CompressionMethod = CompressionMethod.None;
                    }
                }
                catch (Exception ex)
                {
                    ItemFailed(current, ex);
                }
            }

            // Step 7: drain the queue; each entry's callback yields the resized stream.
            archive.Save();
        } // Step 8: archive closed/disposed here.

        jobTimer.Stop();

        // Step 9: report success.
        JobCompleted(s);
    }
    catch (Exception ex)
    {
        // Report failure to whoever is listening.
        JobFailed(s, ex);
    }
}
/// <summary>
/// BackgroundWorker DoWork handler. Builds a BatchResizeSettings job from the
/// user inputs dictionary ("querystring" and "batchItems" keys) and runs it
/// synchronously on the worker thread, wiring s_ItemEvent / s_JobEvent for
/// progress reporting.
/// </summary>
/// <param name="sender">The BackgroundWorker raising the event.</param>
/// <param name="e">Carries the inputs dictionary in e.Argument.</param>
void bwResizeBatch_DoWork(object sender, DoWorkEventArgs e)
{
    // Forward the raw argument to any external DoWork listeners first.
    if (DoWorkEvent != null) DoWorkEvent(this, new DoWorkEventArgs(e.Argument));
    count = 0;
    try
    {
        Dictionary<string, object> userInputs = e.Argument as Dictionary<string, object>;
        if (userInputs != null)
        {
            ResizeSettings rs = new ResizeSettings((string)userInputs["querystring"]);
            batchItems = (List<BatchInfo>)userInputs["batchItems"];
            total = batchItems.Count;

            Guid job = Guid.NewGuid();
            var s = new BatchResizeSettings(Properties.Settings.Default.saveZipPath, job, new List<BatchResizeItem>());
            foreach (var item in batchItems)
            {
                s.files.Add(new BatchResizeItem(item.FullPath, null, rs.ToStringEncoded()));
            }

            s.ItemEvent += s_ItemEvent;
            s.JobEvent += s_JobEvent;

            // Run synchronously — the BackgroundWorker thread IS the batch thread.
            new BatchResizeWorker(s).Work();
        }
    }
    catch (Exception ex)
    {
        // FIX: previously this catch block was empty, silently discarding every
        // failure (bad cast, missing dictionary key, job setup error). Keep the
        // best-effort contract (don't crash the worker) but record the failure
        // so it is visible to trace listeners and to RunWorkerCompleted.
        System.Diagnostics.Trace.TraceError("bwResizeBatch_DoWork failed: {0}", ex);
        e.Result = ex;
    }
}