public async Task getDataFromServer()
{
    // Fetch the compliance task identified by task_id_to_send from the server
    // and bind it to the detail view: labels, min/max media counters and the
    // three attachment grids (image / video / audio).
    progress.Show();

    // Request payload: { "task_id": <task_id_to_send> }
    dynamic value = new ExpandoObject();
    value.task_id = task_id_to_send;
    string json = JsonConvert.SerializeObject(value);

    try
    {
        JsonValue item = await restService.GetComplianceTask(Activity, json, geolocation);
        comp = JsonConvert.DeserializeObject<ComplianceModel>(item);
    }
    catch (Exception)
    {
        // Network/parse failure: hide the spinner and bail out. The original
        // swallowed the exception and then dereferenced a null 'comp' below,
        // which crashed with a NullReferenceException.
        progress.Dismiss();
        return;
    }

    if (comp == null)
    {
        // Deserialization can legitimately yield null (e.g. empty body).
        progress.Dismiss();
        return;
    }

    // Copy the model into the fragment-level fields used by other handlers.
    shapes1 = JsonConvert.DeserializeObject<Shapes>(comp.shapes);
    task_id = comp.task_id;
    task_description = comp.description;
    deadline = comp.deadline_date;
    meatingid = comp.Meeting_ID;
    rownum = comp.RowNo;
    markby = comp.task_mark_by;
    taskstatus = comp.taskStatus;
    markto = comp.markTo;
    markingtype = comp.task_marking_type;
    taskcreatedby = comp.task_created_by;
    markingDate = comp.MarkingDate;
    creationdate = comp.task_creation_date;
    shapes_from_Comp = comp.shapes;
    task_name = comp.task_name;

    List<ComplianceJoinTable> lstAddedCompliance = comp.lstAddedCompliance;
    List<CommunicationModel> lstCommunication = comp.lstCommunication;
    // Currently unused here; the local-DB insert that consumed it is disabled.
    List<TaskFilemappingModel2> lstTaskFileMapping = comp.lstTaskFileMapping;
    List<Comp_AttachmentModel> lstUploadedCompliance = comp.lstUploadedCompliance;

    // Split required compliance items by media type, accumulating the total
    // allowed count (*_max) and the mandatory-only count (*_min) per type.
    image_lst = new List<ComplianceJoinTable>();
    audio_lst = new List<ComplianceJoinTable>();
    video_lst = new List<ComplianceJoinTable>();
    foreach (ComplianceJoinTable added in lstAddedCompliance)
    {
        // Null-safe, case-insensitive replacement for the original
        // try { complianceType.ToLower().Equals("mandatory") } catch { },
        // which used an empty catch as a null check.
        bool mandatory = string.Equals(added.complianceType, "mandatory",
                                       StringComparison.OrdinalIgnoreCase);
        if (added.file_type.Equals("Image"))
        {
            image_lst.Add(added);
            img_max += added.max_numbers;
            if (mandatory) { img_min += added.max_numbers; }
        }
        else if (added.file_type.Equals("Audio"))
        {
            audio_lst.Add(added);
            aud_max += added.max_numbers;
            if (mandatory) { aud_min += added.max_numbers; }
        }
        else if (added.file_type.Equals("Video"))
        {
            video_lst.Add(added);
            vdo_max += added.max_numbers;
            if (mandatory) { vdo_min += added.max_numbers; }
        }
    }

    // Pull contact numbers for the task assigner and creator.
    foreach (CommunicationModel contact in lstCommunication)
    {
        if (contact.role.Equals("Assigner")) { mark_by_num = contact.mobile; }
        if (contact.role.Equals("Creator")) { creat_by_num = contact.mobile; }
    }

    // Bucket already-uploaded attachments by media type.
    audio_comp_lst = new List<Comp_AttachmentModel>();
    video_comp_lst = new List<Comp_AttachmentModel>();
    image_comp_lst = new List<Comp_AttachmentModel>();
    foreach (Comp_AttachmentModel uploaded in lstUploadedCompliance)
    {
        if (uploaded.file_type.Equals("Video")) { video_comp_lst.Add(uploaded); }
        if (uploaded.file_type.Equals("Audio")) { audio_comp_lst.Add(uploaded); }
        if (uploaded.file_type.Equals("Image")) { image_comp_lst.Add(uploaded); }
    }

    // Bind the model to the labels and counters.
    descrip_text.Text = task_description;
    createdby_text.Text = taskcreatedby;
    markby_text.Text = markby;
    creationdate_text.Text = creationdate;
    deadline_text.Text = deadline;
    name_text.Text = task_name;
    uploadimage.Text = image_comp_lst.Count.ToString();
    uploadaudio.Text = audio_comp_lst.Count.ToString();
    uploadvideo.Text = video_comp_lst.Count.ToString();
    Image_no.Text = img_max.ToString();
    Video_no.Text = vdo_max.ToString();
    Audio_no.Text = aud_max.ToString();

    // Rebuild the three attachment grids from the freshly bucketed lists.
    adapter1 = new GridViewAdapter_Image(Activity, image_comp_lst, FragmentManager);
    Gridview1.Adapter = adapter1;
    adapter2 = new GridViewAdapter_Video(Activity, video_comp_lst, FragmentManager);
    Gridview2.Adapter = adapter2;
    adapter3 = new GridViewAdapter_Audio(Activity, audio_comp_lst, FragmentManager);
    Gridview3.Adapter = adapter3;

    progress.Dismiss();
}
public override void OnActivityResult(int requestCode, int resultCode, Intent data)
{
    // Dispatches results from the camera, the video recorder and the speech
    // recognizer, then refreshes the upload counters.
    // 'async' was dropped from the override: the body contains no awaits, so
    // the modifier only produced a CS1998 warning ('async void' is also a
    // known hazard — exceptions become unobservable).
    base.OnActivityResult(requestCode, resultCode, data);

    if (requestCode == Camera && resultCode == (int)Android.App.Result.Ok)
    {
        // Downscale the captured photo to a quarter of the screen size.
        // NOTE(review): the resulting Bitmap is disposed immediately and never
        // drawn — presumably LoadAndResizeBitmap rewrites the file at
        // fileImagePath as a side effect; confirm against its implementation.
        int height = Resources.DisplayMetrics.HeightPixels;
        int width = Resources.DisplayMetrics.WidthPixels;
        using (fileImagePath.Path.LoadAndResizeBitmap(width / 4, height / 4))
        {
        }

        long size1 = fileImagePath.Length() / 1024;   // file size in KB
        string imgsize = size1.ToString();

        Comp_AttachmentModel attachmentModel = new Comp_AttachmentModel();
        attachmentModel.localPath = imageURL;
        attachmentModel.file_type = "Image";
        attachmentModel.FileName = imageName;
        attachmentModel.taskId = task_id_to_send;
        attachmentModel.GeoLocation = geolocation;
        attachmentModel.FileSize = imgsize;
        attachmentModel.file_format = ".jpg";

        // Confirm with the user before persisting locally and, when the
        // device is online, uploading to the server.
        Android.App.AlertDialog.Builder alertDiag = new Android.App.AlertDialog.Builder(Activity);
        alertDiag.SetTitle("Upload Compliance");
        alertDiag.SetMessage("press upload to continue");
        alertDiag.SetPositiveButton("Upload", (senderAlert, args) =>
        {
            // "no" marks the row as not-yet-synced in the local DB.
            db.InsertAttachmentData(attachmentModel, "no");
            image_comp_lst.AddRange(db.GetAttachmentData(imageName));
            adapter1 = new GridViewAdapter_Image(Activity, image_comp_lst, FragmentManager);
            Gridview1.Adapter = adapter1;
            if (ic.connectivity())
            {
                postattachmentcomplianceAsync(attachmentModel);
            }
        });
        alertDiag.SetNegativeButton("Cancel", (senderAlert, args) =>
        {
            alertDiag.Dispose();
        });
        Dialog diag = alertDiag.Create();
        diag.Show();
    }

    if (requestCode == Video && resultCode == (int)Android.App.Result.Ok)
    {
        long size2 = fileVideoPath.Length() / 1024;   // file size in KB
        string videosize = size2.ToString();

        Comp_AttachmentModel attachmentModel = new Comp_AttachmentModel();
        attachmentModel.localPath = videoURL;
        attachmentModel.file_type = "Video";
        attachmentModel.FileName = videoName;
        attachmentModel.taskId = task_id_to_send;
        attachmentModel.GeoLocation = geolocation;
        attachmentModel.FileSize = videosize;
        attachmentModel.file_format = ".mp4";

        // Unlike the image path, videos are saved and uploaded without a
        // confirmation dialog (matches the original behavior).
        db.InsertAttachmentData(attachmentModel, "no");
        video_comp_lst.AddRange(db.GetAttachmentData(videoName));
        adapter2 = new GridViewAdapter_Video(Activity, video_comp_lst, FragmentManager);
        Gridview2.Adapter = adapter2;
        if (ic.connectivity())
        {
            postattachmentcomplianceAsync(attachmentModel);
        }
    }

    if (requestCode == VOICE && resultCode == (int)Android.App.Result.Ok)
    {
        var matches = data.GetStringArrayListExtra(RecognizerIntent.ExtraResults);
        // GetStringArrayListExtra returns null when the extra is absent —
        // the original dereferenced matches.Count unguarded.
        if (matches != null && matches.Count != 0)
        {
            string textInput = Description.Text + matches[0];
            // limit the output to 500 characters
            if (textInput.Length > 500)
            {
                textInput = textInput.Substring(0, 500);
            }
            Description.Text = textInput;
        }
        else
        {
            Description.Text = "No speech was recognised";
        }
    }

    // Refresh the on-screen upload counters after any of the branches above.
    uploadimage.Text = image_comp_lst.Count.ToString();
    uploadaudio.Text = audio_comp_lst.Count.ToString();
    uploadvideo.Text = video_comp_lst.Count.ToString();
}
protected override void OnActivityResult(int requestCode, [GeneratedEnum] Result resultCode, Intent data)
{
    // Dispatches results from the camera, the video recorder and the speech
    // recognizer for this activity, persisting a Comp_AttachmentModel and
    // refreshing the matching grid.
    // NOTE(review): resultCode is never checked in the Camera/Video branches —
    // a cancelled capture still inserts a DB row; confirm this is intended.
    base.OnActivityResult(requestCode, resultCode, data);

    if (requestCode == Camera)
    {
        // The original decoded imageURL into a Bitmap that was never used
        // (and never recycled) — removed here as dead work.
        // NOTE(review): this "Image" attachment is populated from
        // AudioSavePathInDevice / audioname; this looks like a copy/paste
        // from the audio branch — confirm the intended image fields.
        Comp_AttachmentModel attachmentModel = new Comp_AttachmentModel();
        attachmentModel.Attachment_Path = AudioSavePathInDevice;
        attachmentModel.Attachment_Type = "Image";
        attachmentModel.Attachment_Name = audioname;
        attachmentModel.Task_id = task_id_to_send;
        db.InsertAttachmentData(attachmentModel);

        // Re-query so the grid reflects every stored image attachment.
        imagelist = db.GetAttachmentData("Image");
        adapter1 = new GridViewAdapter_Image(this, imagelist);
        Gridview1.Adapter = adapter1;
        Gridview1.setExpanded(true);
        Gridview1.ChoiceMode = (ChoiceMode)AbsListViewChoiceMode.MultipleModal;
        Gridview1.SetMultiChoiceModeListener(new MultiChoiceModeListener1(this));
        imageCount++;
    }

    if (requestCode == Video)
    {
        // NOTE(review): same copy/paste suspicion as the Camera branch —
        // a "Video" attachment populated from the audio path/name fields.
        Comp_AttachmentModel attachmentModel = new Comp_AttachmentModel();
        attachmentModel.Attachment_Path = AudioSavePathInDevice;
        attachmentModel.Attachment_Type = "Video";
        attachmentModel.Attachment_Name = audioname;
        attachmentModel.Task_id = task_id_to_send;
        db.InsertAttachmentData(attachmentModel);

        videolist = db.GetAttachmentData("Video");
        adapter2 = new GridViewAdapter_Video(this, videolist);
        Gridview2.Adapter = adapter2;
        Gridview2.setExpanded(true);
        Gridview2.ChoiceMode = (ChoiceMode)AbsListViewChoiceMode.MultipleModal;
        Gridview2.SetMultiChoiceModeListener(new MultiChoiceModeListener2(this));
        videoCount++;
    }

    if (requestCode == VOICE)
    {
        var matches = data.GetStringArrayListExtra(RecognizerIntent.ExtraResults);
        // GetStringArrayListExtra returns null when the extra is absent —
        // the original dereferenced matches.Count unguarded.
        if (matches != null && matches.Count != 0)
        {
            string textInput = Description.Text + matches[0];
            // limit the output to 500 characters
            if (textInput.Length > 500)
            {
                textInput = textInput.Substring(0, 500);
            }
            Description.Text = textInput;
        }
        else
        {
            Description.Text = "No speech was recognised";
        }
    }
}