internal void RunTest1()
{
    //BlockStore.AddBlock("C:\\Users\\jony\\Files\\Data\\SEDM\\v3\\2009\\October2009\\01Oct0900_0.zip");
    string datRoot = "C:\\Users\\jony\\Files\\Data\\SEDM\\v3\\2009\\October2009\\01Oct0900_";
    List<string> files = new List<string>();
    for (int i = 0; i < 10; i++)
        files.Add(datRoot + i + ".zip");
    BlockStore.AddBlocks(files.ToArray());
}
public int gemData(string Forsøgsnavn, List<double> Rådata)
{
    // Danish identifiers kept as-is: gemData = "save data", Forsøgsnavn = "experiment name",
    // Rådata = "raw data", Datostempel = "date stamp", Blodtryksmåling = "blood pressure measurement".
    Datostempel = DateTime.Now;
    double[] BLOBListe = Rådata.ToArray(); // note: not used below; the byte packing works directly on the list
    // Pack the raw samples into a byte array for the blob column.
    byte[] BYTEliste = Rådata.SelectMany(value => BitConverter.GetBytes(value)).ToArray();
    String query = "INSERT INTO SEMPRJ3 (Forsøgsnavn, Datostempel, Blodtryksmåling) " +
                   "Output Inserted.Id " +
                   "VALUES(@Forsøgsnavn, @Dato, @MåleListe) ";
    conn.Open();
    SqlCommand command = new SqlCommand(query, conn);
    command.Parameters.AddWithValue("@Forsøgsnavn", Forsøgsnavn);
    command.Parameters.Add("@Dato", SqlDbType.DateTime).Value = Datostempel;
    command.Parameters.Add("@MåleListe", SqlDbType.Image).Value = BYTEliste;
    GemtId = Convert.ToInt32(command.ExecuteScalar());
    conn.Close();
    return GemtId;
}
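To read a saved measurement back, the byte packing above has to be reversed; the helper below is a minimal sketch of that inverse step (the method name and its use are illustrative, not part of the original code).

// Illustrative sketch: unpack a blob written by gemData back into doubles.
// Each value occupies sizeof(double) = 8 bytes, in the order produced by BitConverter.GetBytes.
static List<double> UnpackMeasurements(byte[] blob)
{
    var values = new List<double>(blob.Length / sizeof(double));
    for (int offset = 0; offset + sizeof(double) <= blob.Length; offset += sizeof(double))
    {
        values.Add(BitConverter.ToDouble(blob, offset));
    }
    return values;
}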
private static KeyValuePair<Item, int>[] GetAllItems(IEnumerable<ItemReference> references)
{
    List<KeyValuePair<Item, int>> result = new List<KeyValuePair<Item, int>>();
    foreach (ItemReference reference in references)
    {
        Database database = Factory.GetDatabase(reference.ItemUri.DatabaseName);
        Item item = database.GetItem(reference.ItemUri.ItemID);
        DoJob(item, reference.Recursive, delegate (Item it)
        {
            Item[] itemsWithMedia = Utils.GetAllVersionsWithMedia(it);
            if (itemsWithMedia != null && itemsWithMedia.Length > 0)
            {
                result.Add(new KeyValuePair<Item, int>(it, itemsWithMedia.Length));
            }
        });
    }
    return result.ToArray();
}
public string[] GetItemsName()
{
    List<string> name = new List<string>();
    productId = (IList<string>)Session["ProId"];
    foreach (var item in productId)
    {
        int id = Convert.ToInt32(item);
        name.Add(db.Products.Find(id).ProductName);
    }
    return name.ToArray();
}
/// <summary>
/// Handles the Click event of the SearchSubmitButton control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
private void SearchSubmitButton_Click(object sender, EventArgs e)
{
    List<string> parameters = new List<string>();
    AddParameter(parameters, "Address", this.SearchAddressTextBox.Visible ? this.SearchAddressTextBox.Text : null);
    AddParameter(parameters, "City", this.SearchCityTextBox.Visible ? this.SearchCityTextBox.Text : null);
    AddParameter(parameters, "Region", this.SearchRegionDropDownList.Visible ? this.SearchRegionDropDownList.SelectedValue : null);
    AddParameter(parameters, "Zip", this.SearchPostalCodeTextBox.Visible ? this.SearchPostalCodeTextBox.Text : null);
    AddParameter(parameters, "Country", this.SearchCountryDropDownList.Visible ? this.SearchCountryDropDownList.SelectedValue : null);
    AddParameter(parameters, "Distance", this.SearchRadiusDropDownList.Visible ? this.SearchRadiusDropDownList.SelectedValue : null);
    AddParameter(parameters, "FilterCountry", this.FilterCountryDropDownList.Visible ? this.FilterCountryDropDownList.SelectedValue : null);

    this.Response.Redirect(Globals.NavigateURL(this.DisplayTabId, string.Empty, parameters.ToArray()));
}
public string ExecuteReader(string queryData) { try { var query = LinqServiceSerializer.Deserialize(queryData); ValidateQuery(query); using (var db = CreateDataContext()) { var obj = db.SetQuery(new QueryContext { SqlQuery = query.Query, Parameters = query.Parameters }); using (var rd = db.ExecuteReader(obj)) { var ret = new LinqServiceResult { QueryID = Guid.NewGuid(), FieldCount = rd.FieldCount, FieldNames = new string[rd.FieldCount], FieldTypes = new Type [rd.FieldCount], Data = new List<string[]>(), }; for (var i = 0; i < ret.FieldCount; i++) { ret.FieldNames[i] = rd.GetName(i); ret.FieldTypes[i] = rd.GetFieldType(i); } var varyingTypes = new List<Type>(); while (rd.Read()) { var data = new string [rd.FieldCount]; var codes = new TypeCode[rd.FieldCount]; for (var i = 0; i < ret.FieldCount; i++) codes[i] = Type.GetTypeCode(ret.FieldTypes[i]); ret.RowCount++; for (var i = 0; i < ret.FieldCount; i++) { if (!rd.IsDBNull(i)) { var code = codes[i]; var type = rd.GetFieldType(i); var idx = -1; if (type != ret.FieldTypes[i]) { code = Type.GetTypeCode(type); idx = varyingTypes.IndexOf(type); if (idx < 0) { varyingTypes.Add(type); idx = varyingTypes.Count - 1; } } switch (code) { case TypeCode.Decimal : data[i] = rd.GetDecimal (i).ToString(CultureInfo.InvariantCulture); break; case TypeCode.Double : data[i] = rd.GetDouble (i).ToString(CultureInfo.InvariantCulture); break; case TypeCode.Single : data[i] = rd.GetFloat (i).ToString(CultureInfo.InvariantCulture); break; case TypeCode.DateTime : data[i] = rd.GetDateTime(i).ToString("o"); break; case TypeCode.Boolean : data[i] = rd.GetBoolean (i).ToString(CultureInfo.InvariantCulture); break; default : { if (type == typeof(DateTimeOffset)) { var dt = rd.GetValue(i); if (dt is DateTime) data[i] = ((DateTime)dt).ToString("o"); else if (dt is DateTimeOffset) data[i] = ((DateTimeOffset)dt).ToString("o"); else data[i] = rd.GetValue(i).ToString(); } else if (ret.FieldTypes[i] == typeof(byte[])) data[i] = Convert.ToBase64String((byte[])rd.GetValue(i)); else data[i] = (rd.GetValue(i) ?? "").ToString(); break; } } if (idx >= 0) data[i] = "\0" + (char)idx + data[i]; } } ret.Data.Add(data); } ret.VaryingTypes = varyingTypes.ToArray(); return LinqServiceSerializer.Serialize(ret); } } } catch (Exception exception) { HandleException(exception); throw; } }
/// <summary> /// The update planning element data. /// </summary> /// <param name="factory"> /// The factory. /// </param> /// <param name="project"> /// The project. /// </param> /// <param name="planningElementList"> /// The planning element list. /// </param> /// <exception cref="InvalidOperationException"> /// Thrown when the database source is unknown. /// </exception> public void UpdatePlanningElementData( string factory, string project, List<PlanningElementDataUpdate> planningElementList ) { project = Util.CorrectProjectName( project ); switch( ImpactDatabase.DataSource ) { case DataSource.Ingres92: case DataSource.Ingres100: { // Create a query that return all the id's of those elements that aldready has planning data. ImpactQuery query = new ImpactQuery() { From = { ImpModelPlanning.As( "T1" ) }, Select = { ImpModelPlanning.ElementId }, Where = { ImpModelPlanning.Factory.Equal(factory), ImpModelPlanning.Project.Equal(project), ImpModelPlanning.ElementId.In<int>( planningElementList.Select( element => element.ElementId ) ) } }; using( var database = new ImpactDatabase() ) { var existingIds = database.GetAll( query.ToString(), reader => reader[0].Cast<int>() ); var statementList = new List<string>( planningElementList.Count ); foreach( var element in planningElementList ) { string producingFactory; if( 0 == string.Compare( Factory.External.Number, element.ProducingFactory, StringComparison.OrdinalIgnoreCase ) ) { producingFactory = ProjectBrowserLoader.ProducingFactoryExternalValue; } else { producingFactory = element.ProducingFactory; } if( existingIds.Contains( element.ElementId ) ) { // We have an update. var update = new ImpactUpdate( ImpModelPlanning.Instance ) { Where = { ImpModelPlanning.Factory.Equal( factory ), ImpModelPlanning.Project.Equal( project ), ImpModelPlanning.ElementId.Equal( element.ElementId ), }, Columns = { { ImpModelPlanning.ProductionFactory, producingFactory ?? string.Empty }, { ImpModelPlanning.DivisionProduction, element.Division ?? string.Empty }, { ImpModelPlanning.ProductionDate, element.ProductionDate }, { ImpModelPlanning.DeliveryDate, element.DeliveryDate }, { ImpModelPlanning.ErectionSequenceNo, element.ErectionSequenceNo }, { ImpModelPlanning.PlannedDrawingDate, element.PlannedDrawingDate }, { ImpModelPlanning.PlannedProductionDate, element.PlannedProductionDate }, { ImpModelPlanning.PlannedReadyForDeliveryDate, element.PlannedStorageDate }, { ImpModelPlanning.PlannedDeliveryDate, element.PlannedDeliveryDate }, { ImpModelPlanning.PlannedErectionDate, element.PlannedErectionDate }, { ImpModelPlanning.ElementIdStatus, element.Status }, } }; statementList.Add( update.ToString() ); } else { // We must insert a new row. var insert = new ImpactInsert( ImpModelPlanning.Instance ) { Columns = { { ImpModelPlanning.Factory, factory }, { ImpModelPlanning.Project, project }, { ImpModelPlanning.ElementId, element.ElementId }, { ImpModelPlanning.ProductionFactory, producingFactory ?? string.Empty }, { ImpModelPlanning.DivisionProduction, element.Division ?? 
string.Empty }, { ImpModelPlanning.ProductionDate, element.ProductionDate }, { ImpModelPlanning.DeliveryDate, element.DeliveryDate }, { ImpModelPlanning.ErectionSequenceNo, element.ErectionSequenceNo }, { ImpModelPlanning.PlannedDrawingDate, element.PlannedDrawingDate }, { ImpModelPlanning.PlannedProductionDate, element.PlannedProductionDate }, { ImpModelPlanning.PlannedReadyForDeliveryDate, element.PlannedStorageDate }, { ImpModelPlanning.PlannedDeliveryDate, element.PlannedDeliveryDate }, { ImpModelPlanning.PlannedErectionDate, element.PlannedErectionDate }, { ImpModelPlanning.ElementIdStatus, element.Status }, } }; statementList.Add( insert.ToString() ); } } database.ExecuteNonQuery( statementList.ToArray() ); } break; } case DataSource.SqlServer: case DataSource.SqlServerExpress: { List<string> statementList = new List<string>( planningElementList.Count ); foreach( var element in planningElementList ) { string producingFactory; if( 0 == string.Compare( Factory.External.Number, element.ProducingFactory, StringComparison.OrdinalIgnoreCase ) ) { producingFactory = ProjectBrowserLoader.ProducingFactoryExternalValue; } else { producingFactory = element.ProducingFactory; } ImpactInsertOrUpdate insertOrUpdate = new ImpactInsertOrUpdate( ImpModelPlanning.Instance ) { Keys = { { ImpModelPlanning.Factory, factory }, { ImpModelPlanning.Project, project }, { ImpModelPlanning.ElementId, element.ElementId }, }, Columns = { { ImpModelPlanning.ProductionFactory, producingFactory ?? string.Empty }, { ImpModelPlanning.DivisionProduction, element.Division ?? string.Empty }, { ImpModelPlanning.ProductionDate, element.ProductionDate }, { ImpModelPlanning.DeliveryDate, element.DeliveryDate }, { ImpModelPlanning.ErectionSequenceNo, element.ErectionSequenceNo }, { ImpModelPlanning.PlannedDrawingDate, element.PlannedDrawingDate }, { ImpModelPlanning.PlannedProductionDate, element.PlannedProductionDate }, { ImpModelPlanning.PlannedReadyForDeliveryDate, element.PlannedStorageDate }, { ImpModelPlanning.PlannedDeliveryDate, element.PlannedDeliveryDate }, { ImpModelPlanning.PlannedErectionDate, element.PlannedErectionDate }, { ImpModelPlanning.ElementIdStatus, element.Status }, } }; statementList.Add( insertOrUpdate.ToString() ); } using( var database = new ImpactDatabase() ) { database.ExecuteNonQuery( statementList.ToArray() ); } break; } default: { throw new InvalidOperationException( "Unknown database source." ); } } }
string[] SplitStream( Stream s )
{
    var list = new List<string>();
    using ( var r = new StreamReader( s ) )
        while ( !r.EndOfStream )
            list.Add( r.ReadLine() );
    return list.ToArray();
}
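A possible usage sketch for SplitStream, assuming a caller inside the same class; the sample text and the MemoryStream wrapper are illustrative.

// Illustrative only: split an in-memory stream into its lines.
var bytes = System.Text.Encoding.UTF8.GetBytes( "first\nsecond\nthird" );
string[] lines = SplitStream( new MemoryStream( bytes ) );
// lines now holds { "first", "second", "third" }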
/// <summary>
/// Gets a comma-delimited list of the IDs of the selected location types.
/// </summary>
/// <returns>A comma-delimited list of the IDs of the selected location types.</returns>
private string GetLocationTypeList()
{
    List<string> locationTypeIds = new List<string>();
    foreach (ListItem li in this.lbLocationType.Items)
    {
        if (li.Selected)
        {
            locationTypeIds.Add(li.Value);
        }
    }

    if (locationTypeIds.Count == 0)
    {
        locationTypeIds.Add(this.lbLocationType.Items[0].Value);
    }

    return string.Join(",", locationTypeIds.ToArray());
}
public static IEnumerable<IChain> Build(IEnumerable<ISnapshotProvider> sources, IEnumerable<ISnapshotConsumer> sinks,
    IEnumerable<IMultipleSnapshotConsumer> multiSinks, IEnumerable<ChainElement> configs)
{
    var chains = new List<IChain>();

    sources = sources.ToArray();
    sinks = sinks.ToArray();
    multiSinks = multiSinks.ToArray();

    foreach (var config in configs)
    {
        try
        {
            if (!sources.Any(s => config.Sources.Split(',').Any(i => i.Equals(s.Id))))
            {
                Log.Warn(string.Format("Couldn't find source '{0}' in the set of sources supplied for chain '{1}'.", config.Sources, config.Id));
                continue;
            }

            if (!sinks.Any(s => config.Sinks.Split(',').Any(i => i.Equals(s.Id))) &&
                !multiSinks.Any(s => config.MultiSinks.Split(',').Any(i => i.Equals(s.Id))))
            {
                Log.Warn(string.Format("Couldn't find one of sinks '{0}' in the set of sinks and multisinks supplied for chain '{1}'.", config.Sinks, config.Id));
                continue;
            }

            if (!string.IsNullOrEmpty(config.Sinks) && config.Sources.Split(',').Count().Equals(1))
            {
                var chosenSource = sources.First(s => s.Id.Equals(config.Sources));
                var chosenSinks = new List<ISnapshotConsumer>();
                foreach (var sinkName in config.Sinks.Split(','))
                {
                    chosenSinks.Add(sinks.First(s => s.Id.Equals(sinkName)));
                }

                chains.Add(new MultipleSinkChain(config.Id, config.Name, chosenSource, chosenSinks.ToArray()));
            }

            if (!string.IsNullOrEmpty(config.MultiSinks))
            {
                var chosenSink = multiSinks.First(s => s.Id.Equals(config.MultiSinks));
                var chosenSources = new List<ISnapshotProvider>();
                foreach (var _ in config.Sources.Split(','))
                {
                    var sourceName = _.TrimStart(' ');
                    chosenSources.Add(sources.First(s => s.Id.Equals(sourceName)));
                }

                chains.Add(new MultipleSourceChain(config.Id, config.Name, chosenSink, chosenSources.ToArray()));
            }
        }
        catch (InvalidOperationException ioe)
        {
            Log.Warn(string.Format("Couldn't construct chain: '{0}' '{1}' '{2}': {3}", config.Id, config.Sources, config.Sinks, ioe.Message));
        }
    }

    return chains;
}
string[] Gen()
{
    var l = new List<string>();
    var lines = r.Next( 1000 ) + 500;
    for ( int i = 0 ; i < lines ; i++ )
        l.Add( ( ( char ) ( 'a' + r.Next( 26 ) ) ).ToString() );
    return l.ToArray();
}
public static Mesh CombineMeshes(List<SVGMesh> meshes, out SVGLayer[] layers, out Shader[] shaders, SVGUseGradients useGradients = SVGUseGradients.Always, SVGAssetFormat format = SVGAssetFormat.Transparent, bool compressDepth = true) { layers = new SVGLayer[0]; shaders = new Shader[0]; //if(SVGAssetImport.sliceMesh) Create9Slice(); SVGFill fill; bool useOpaqueShader = false; bool useTransparentShader = false; bool hasGradients = (useGradients == SVGUseGradients.Always); int totalMeshes = meshes.Count, totalTriangles = 0, opaqueTriangles = 0, transparentTriangles = 0; // Z Sort meshes if(format == SVGAssetFormat.Opaque) { if(compressDepth) { SVGBounds meshBounds = SVGBounds.InfiniteInverse; for (int i = 0; i < totalMeshes; i++) { if (meshes [i] == null) continue; meshBounds.Encapsulate(meshes [i].bounds); } if(!meshBounds.isInfiniteInverse) { SVGGraphics.depthTree.Clear(); SVGGraphics.depthTree = new SVGDepthTree(meshBounds); for (int i = 0; i < totalMeshes; i++) { fill = meshes [i]._fill; SVGMesh[] nodes = SVGGraphics.depthTree.TestDepthAdd(meshes [i], new SVGBounds(meshes [i]._bounds)); int nodesLength = 0; if(nodes == null || nodes.Length == 0) { meshes [i]._depth = 0; } else { nodesLength = nodes.Length; int highestDepth = 0; SVGMesh highestMesh = null; for(int j = 0; j < nodesLength; j++) { if(nodes[j].depth > highestDepth) { highestDepth = nodes[j].depth; highestMesh = nodes[j]; } } if(fill.blend == FILL_BLEND.OPAQUE) { meshes [i]._depth = highestDepth + 1; } else { if(highestMesh != null && highestMesh.fill.blend == FILL_BLEND.OPAQUE) { meshes [i]._depth = highestDepth + 1; } else { meshes [i]._depth = highestDepth; } } } meshes [i].UpdateDepth(); } } } else { int highestDepth = 0; for (int i = 0; i < totalMeshes; i++) { fill = meshes [i]._fill; if (fill.blend == FILL_BLEND.OPAQUE || lastBlendType == FILL_BLEND.OPAQUE) { meshes[i]._depth = ++highestDepth; } else { meshes[i]._depth = highestDepth; } lastBlendType = fill.blend; meshes[i].UpdateDepth(); } } } layers = new SVGLayer[totalMeshes]; int totalVertices = 0, vertexCount, vertexStart, currentVertex; for(int i = 0; i < totalMeshes; i++) { fill = meshes[i]._fill; if(fill.blend == FILL_BLEND.OPAQUE) { opaqueTriangles += meshes[i]._triangles.Length; useOpaqueShader = true; } else if(fill.blend == FILL_BLEND.ALPHA_BLENDED) { transparentTriangles += meshes[i]._triangles.Length; useTransparentShader = true; } if(fill.fillType == FILL_TYPE.GRADIENT) hasGradients = true; vertexCount = meshes[i]._vertices.Length; Bounds bounds = meshes[i]._bounds; layers[i] = new SVGLayer(meshes[i]._name, totalVertices, vertexCount, bounds.center, bounds.size); totalVertices += vertexCount; } totalTriangles = opaqueTriangles + transparentTriangles; if(useGradients == SVGUseGradients.Never) hasGradients = false; if(format != SVGAssetFormat.Opaque) { useOpaqueShader = false; useTransparentShader = true; } Vector3[] vertices = new Vector3[totalVertices]; Color32[] colors32 = new Color32[totalVertices]; Vector2[] uv = null; Vector2[] uv2 = null; int[][] triangles = null; for(int i = 0; i < totalMeshes; i++) { vertexStart = layers[i].vertexStart; vertexCount = layers[i].vertexCount; for(int j = 0; j < vertexCount; j++) { currentVertex = vertexStart + j; vertices[currentVertex] = meshes[i]._vertices[j]; colors32[currentVertex] = meshes[i]._colors[j]; } } List<Shader> outputShaders = new List<Shader>(); // Debug.Log("hasGradients: "+hasGradients); if(hasGradients) { uv = new Vector2[totalVertices]; uv2 = new Vector2[totalVertices]; for(int i = 0; i < 
totalMeshes; i++) { vertexStart = layers[i].vertexStart; vertexCount = layers[i].vertexCount; for(int j = 0; j < vertexCount; j++) { currentVertex = vertexStart + j; uv[currentVertex] = meshes[i]._uvs[j]; uv2[currentVertex] = meshes[i]._uvs2[j]; } } if(useOpaqueShader) { outputShaders.Add(SVGShader.GradientColorOpaque); } if(useTransparentShader) { outputShaders.Add(SVGShader.GradientColorAlphaBlended); } } else { if(useOpaqueShader) { outputShaders.Add(SVGShader.SolidColorOpaque); } if(useTransparentShader) { outputShaders.Add(SVGShader.SolidColorAlphaBlended); } } if(useOpaqueShader && useTransparentShader) { triangles = new int[2][]{new int[opaqueTriangles], new int[transparentTriangles]}; int lastVertexIndex = 0; int triangleCount; int lastOpauqeTriangleIndex = 0; int lastTransparentTriangleIndex = 0; for(int i = 0; i < totalMeshes; i++) { triangleCount = meshes[i]._triangles.Length; if(meshes[i]._fill.blend == FILL_BLEND.OPAQUE) { for(int j = 0; j < triangleCount; j++) { triangles[0][lastOpauqeTriangleIndex++] = lastVertexIndex + meshes[i]._triangles[j]; } } else { for(int j = 0; j < triangleCount; j++) { triangles[1][lastTransparentTriangleIndex++] = lastVertexIndex + meshes[i]._triangles[j]; } } lastVertexIndex += layers[i].vertexCount; } } else { triangles = new int[1][]{new int[totalTriangles]}; int lastVertexIndex = 0; int triangleCount; int lastTriangleIndex = 0; for(int i = 0; i < totalMeshes; i++) { triangleCount = meshes[i]._triangles.Length; for(int j = 0; j < triangleCount; j++) { triangles[0][lastTriangleIndex++] = lastVertexIndex + meshes[i]._triangles[j]; } lastVertexIndex += layers[i].vertexCount; } } if(outputShaders.Count != 0) shaders = outputShaders.ToArray(); Mesh output = new Mesh(); output.vertices = vertices; output.colors32 = colors32; if(hasGradients) { output.uv = uv; output.uv2 = uv2; } if(triangles.Length == 1) { output.triangles = triangles[0]; } else { output.subMeshCount = triangles.Length; for(int i = 0; i < triangles.Length; i++) { output.SetTriangles(triangles[i], i); } } return output; }
public TOFChannelSet TOFDemodulateBlock(Block b, int detectorIndex, bool allChannels) { // *** demodulate channels *** // ** build the list of modulations ** List<string> modNames = new List<string>(); List<Waveform> modWaveforms = new List<Waveform>(); foreach (AnalogModulation mod in b.Config.AnalogModulations) { modNames.Add(mod.Name); modWaveforms.Add(mod.Waveform); } foreach (DigitalModulation mod in b.Config.DigitalModulations) { modNames.Add(mod.Name); modWaveforms.Add(mod.Waveform); } foreach (TimingModulation mod in b.Config.TimingModulations) { modNames.Add(mod.Name); modWaveforms.Add(mod.Waveform); } // ** work out the switch state for each point ** int blockLength = modWaveforms[0].Length; List<bool[]> wfBits = new List<bool[]>(); foreach (Waveform wf in modWaveforms) wfBits.Add(wf.Bits); List<uint> switchStates = new List<uint>(blockLength); for (int i = 0; i < blockLength; i++) { uint switchState = 0; for (int j = 0; j < wfBits.Count; j++) { if (wfBits[j][i]) switchState += (uint)Math.Pow(2, j); } switchStates.Add(switchState); } // pre-calculate the state signs for each analysis channel // the first index selects the analysis channel, the second the switchState int numStates = (int)Math.Pow(2, modWaveforms.Count); bool[,] stateSigns = new bool[numStates, numStates]; // make a BlockDemodulator just to use its stateSign code // They should probably share a base class. BlockDemodulator bd = new BlockDemodulator(); for (uint i = 0; i < numStates; i++) { for (uint j = 0; j < numStates; j++) { stateSigns[i, j] = (bd.stateSign(j, i) == 1); } } TOFChannelSet tcs = new TOFChannelSet(); // By setting all channels to false only a limited number of channels are analysed, // namely those required to extract the edm (and the correction term). This speeds // up the execution enormously when the BlockTOFDemodulator is used by the // BlockDemodulator for calculating the non-linear channel combinations. //int[] channelsToAnalyse; List<int> channelsToAnalyse; if (allChannels) { //channelsToAnalyse = new int[numStates]; channelsToAnalyse = new List<int>(); //for (int i = 0; i < numStates; i++) channelsToAnalyse[i] = i; for (int i = 0; i < numStates; i++) channelsToAnalyse.Add(i); } else { // just the essential channels - this code is a little awkward because, like // so many bits of the analysis code, it was added long after the original // code was written, and goes against some assumptions that were made back then! 
int bIndex = modNames.IndexOf("B"); int dbIndex = modNames.IndexOf("DB"); int eIndex = modNames.IndexOf("E"); int rf1fIndex = modNames.IndexOf("RF1F"); int rf2fIndex = modNames.IndexOf("RF2F"); int rf1aIndex = modNames.IndexOf("RF1A"); int rf2aIndex = modNames.IndexOf("RF2A"); int lf1Index = modNames.IndexOf("LF1"); int lf2Index = modNames.IndexOf("LF2"); int sigChannel = 0; int bChannel = (1 << bIndex); int dbChannel = (1 << dbIndex); int ebChannel = (1 << eIndex) + (1 << bIndex); int edbChannel = (1 << eIndex) + (1 << dbIndex); int dbrf1fChannel = (1 << dbIndex) + (1 << rf1fIndex); int dbrf2fChannel = (1 << dbIndex) + (1 << rf2fIndex); int brf1fChannel = (1 << bIndex) + (1 << rf1fIndex); int brf2fChannel = (1 << bIndex) + (1 << rf2fIndex); int edbrf1fChannel = (1 << eIndex) + (1 << dbIndex) + (1 << rf1fIndex); int edbrf2fChannel = (1 << eIndex) + (1 << dbIndex) + (1 << rf2fIndex); int ebdbChannel = (1 << eIndex) + (1 << bIndex) + (1 << dbIndex); int rf1fChannel = (1 << rf1fIndex); int rf2fChannel = (1 << rf2fIndex); int erf1fChannel = (1 << eIndex) + (1 << rf1fIndex); int erf2fChannel = (1 << eIndex) + (1 << rf2fIndex); int rf1aChannel = (1 << rf1aIndex); int rf2aChannel = (1 << rf2aIndex); int dbrf1aChannel = (1 << dbIndex) + (1 << rf1aIndex); int dbrf2aChannel = (1 << dbIndex) + (1 << rf2aIndex); int lf1Channel = (1 << lf1Index); int dblf1Channel = (1 << dbIndex) + (1 << lf1Index); channelsToAnalyse = new List<int>() { sigChannel, bChannel, dbChannel, ebChannel, edbChannel, dbrf1fChannel, dbrf2fChannel, brf1fChannel, brf2fChannel, edbrf1fChannel, edbrf2fChannel, ebdbChannel, rf1fChannel, rf2fChannel, erf1fChannel, erf2fChannel, rf1aChannel, rf2aChannel, dbrf1aChannel, dbrf2aChannel, lf1Channel, dblf1Channel, }; if (lf2Index != -1) // Index = -1 if "LF2" not found { int lf2Channel = (1 << lf2Index); channelsToAnalyse.Add(lf2Channel); int dblf2Channel = (1 << dbIndex) + (1 << lf2Index); channelsToAnalyse.Add(dblf2Channel); } //channelsToAnalyse = new int[] { bChannel, dbChannel, ebChannel, edbChannel, dbrf1fChannel, // dbrf2fChannel, brf1fChannel, brf2fChannel, edbrf1fChannel, edbrf2fChannel, ebdbChannel, // rf1fChannel, rf2fChannel, erf1fChannel, erf2fChannel, rf1aChannel, rf2aChannel, dbrf1aChannel, // dbrf2aChannel, lf1Channel, dblf1Channel, lf2Channel, dblf2Channel //}; } foreach (int channel in channelsToAnalyse) { // generate the Channel TOFChannel tc = new TOFChannel(); TOF tOn = new TOF(); TOF tOff = new TOF(); for (int i = 0; i < blockLength; i++) { if (stateSigns[channel, switchStates[i]]) tOn += ((TOF)((EDMPoint)(b.Points[i])).Shot.TOFs[detectorIndex]); else tOff += ((TOF)((EDMPoint)(b.Points[i])).Shot.TOFs[detectorIndex]); } tOn /= (blockLength / 2); tOff /= (blockLength / 2); tc.On = tOn; tc.Off = tOff; // This "if" is to take care of the case of the "SIG" channel, for which there // is no off TOF. if (tc.Off.Length != 0) tc.Difference = tc.On - tc.Off; else tc.Difference = tc.On; // add the Channel to the ChannelSet List<string> usedSwitches = new List<string>(); for (int i = 0; i < modNames.Count; i++) if ((channel & (1 << i)) != 0) usedSwitches.Add(modNames[i]); string[] channelName = usedSwitches.ToArray(); // the SIG channel has a special name if (channel == 0) channelName = new string[] {"SIG"}; tcs.AddChannel(channelName, tc); } // ** add the special channels ** // extract the TOFChannels that we need. 
TOFChannel c_eb = (TOFChannel)tcs.GetChannel(new string[] { "E", "B" }); TOFChannel c_edb = (TOFChannel)tcs.GetChannel(new string[] {"E", "DB"}); TOFChannel c_dbrf1f = (TOFChannel)tcs.GetChannel(new string[] { "DB", "RF1F" }); TOFChannel c_dbrf2f = (TOFChannel)tcs.GetChannel(new string[] { "DB", "RF2F" }); TOFChannel c_b = (TOFChannel)tcs.GetChannel(new string[] { "B" }); TOFChannel c_db = (TOFChannel)tcs.GetChannel(new string[] { "DB" }); TOFChannel c_sig = (TOFChannel)tcs.GetChannel(new string[] { "SIG" }); TOFChannel c_brf1f = (TOFChannel)tcs.GetChannel(new string[] { "B", "RF1F" }); TOFChannel c_brf2f = (TOFChannel)tcs.GetChannel(new string[] { "B", "RF2F" }); TOFChannel c_edbrf1f = (TOFChannel)tcs.GetChannel(new string[] { "E", "DB", "RF1F" }); TOFChannel c_edbrf2f = (TOFChannel)tcs.GetChannel(new string[] { "E", "DB", "RF2F" }); TOFChannel c_ebdb= (TOFChannel)tcs.GetChannel(new string[] { "E", "B", "DB" }); TOFChannel c_rf1f = (TOFChannel)tcs.GetChannel(new string[] { "RF1F" }); TOFChannel c_rf2f = (TOFChannel)tcs.GetChannel(new string[] { "RF2F" }); TOFChannel c_erf1f = (TOFChannel)tcs.GetChannel(new string[] { "E", "RF1F" }); TOFChannel c_erf2f = (TOFChannel)tcs.GetChannel(new string[] { "E", "RF2F" }); TOFChannel c_rf1a = (TOFChannel)tcs.GetChannel(new string[] { "RF1A" }); TOFChannel c_rf2a = (TOFChannel)tcs.GetChannel(new string[] { "RF2A" }); TOFChannel c_dbrf1a = (TOFChannel)tcs.GetChannel(new string[] { "DB", "RF1A" }); TOFChannel c_dbrf2a = (TOFChannel)tcs.GetChannel(new string[] { "DB", "RF2A" }); TOFChannel c_lf1 = (TOFChannel)tcs.GetChannel(new string[] { "LF1" }); TOFChannel c_dblf1 = (TOFChannel)tcs.GetChannel(new string[] { "DB", "LF1" }); TOFChannel c_lf2; TOFChannel c_dblf2; if (modNames.IndexOf("LF2") == -1) // Index = -1 if "LF2" not found { TOF tofTemp = new TOF(); TOFChannel tcTemp = new TOFChannel(); // For many blocks there is no LF2 channel (and hence switch states). // To get around this problem I will populate the TOFChannel with "SIG" // It will then be obvious in the analysis when LF2 takes on real values. for (int i = 0; i < blockLength; i++) { tofTemp += ((TOF)((EDMPoint)(b.Points[i])).Shot.TOFs[detectorIndex]); } tofTemp /= (blockLength / 2); tcTemp.On = tofTemp; tcTemp.Off = tofTemp; tcTemp.Difference = tofTemp; c_lf2 = tcTemp; c_dblf2 = tcTemp; } else { c_lf2 = (TOFChannel)tcs.GetChannel(new string[] { "LF2" }); c_dblf2 = (TOFChannel)tcs.GetChannel(new string[] { "DB", "LF2" }); } // work out some intermediate terms for the full, corrected edm. The names // refer to the joint power of c_db and c_b in the term. TOFChannel squaredTerms = (((c_db * c_db) - (c_dbrf1f * c_dbrf1f) - (c_dbrf2f * c_dbrf2f)) * c_eb) - (c_b * c_db * c_edb); // this is missing the term /beta c_db c_ebdb at the moment, mainly because // I've no idea what beta should be. TOFChannel linearTerms = (c_b * c_dbrf1f * c_edbrf1f) + (c_b * c_dbrf2f * c_edbrf2f) - (c_db * c_brf1f * c_edbrf1f) - (c_db * c_brf2f * c_edbrf2f); TOFChannel preDenominator = (c_db * c_db * c_db) + (c_dbrf1f * c_edb * c_edbrf1f) + (c_dbrf1f * c_edb * c_edbrf1f) + (c_dbrf2f * c_edb * c_edbrf2f) + (c_dbrf2f * c_edb * c_edbrf2f) - c_db * ( (c_dbrf1f * c_dbrf1f) + (c_dbrf2f * c_dbrf2f) + (c_edb * c_edb) + (c_edbrf1f * c_edbrf1f) + (c_edbrf2f * c_edbrf2f) ); // it's important when working out the non-linear channel // combinations to always keep them dimensionless. If you // don't you'll run into trouble with integral vs. average // signal. 
TOFChannel edmDB = c_eb / c_db; tcs.AddChannel(new string[] { "EDMDB" }, edmDB); // The corrected edm channel. This should be proportional to the edm phase. TOFChannel edmCorrDB = (squaredTerms + linearTerms) / preDenominator; tcs.AddChannel(new string[] { "EDMCORRDB" }, edmCorrDB); // It's useful to have an estimate of the size of the correction. Here // we return the difference between the corrected edm channel and the // naive guess, edmDB. TOFChannel correctionDB = edmCorrDB - edmDB; tcs.AddChannel(new string[] { "CORRDB" }, correctionDB); // The "old" correction that just corrects for the E-correlated amplitude change. // This is included in the dblocks for debugging purposes. TOFChannel correctionDB_old = (c_edb * c_b) / (c_db * c_db); tcs.AddChannel(new string[] { "CORRDB_OLD" }, correctionDB_old); TOFChannel edmCorrDB_old = edmDB - correctionDB_old; tcs.AddChannel(new string[] { "EDMCORRDB_OLD" }, edmCorrDB_old); // Normalised RFxF channels. TOFChannel rf1fDB = c_rf1f / c_db; tcs.AddChannel(new string[] { "RF1FDB" }, rf1fDB); TOFChannel rf2fDB = c_rf2f / c_db; tcs.AddChannel(new string[] { "RF2FDB" }, rf2fDB); // And RFxF.DB channels, again normalised to DB. The naming of these channels is quite // unfortunate, but it's just tough. TOFChannel rf1fDBDB = c_dbrf1f / c_db; tcs.AddChannel(new string[] { "RF1FDBDB" }, rf1fDBDB); TOFChannel rf2fDBDB = c_dbrf2f / c_db; tcs.AddChannel(new string[] { "RF2FDBDB" }, rf2fDBDB); // Normalised RFxAchannels. TOFChannel rf1aDB = c_rf1a / c_db; tcs.AddChannel(new string[] { "RF1ADB" }, rf1aDB); TOFChannel rf2aDB = c_rf2a / c_db; tcs.AddChannel(new string[] { "RF2ADB" }, rf2aDB); // And RFxA.DB channels, again normalised to DB. The naming of these channels is quite // unfortunate, but it's just tough. TOFChannel rf1aDBDB = c_dbrf1a / c_db; tcs.AddChannel(new string[] { "RF1ADBDB" }, rf1aDBDB); TOFChannel rf2aDBDB = c_dbrf2a / c_db; tcs.AddChannel(new string[] { "RF2ADBDB" }, rf2aDBDB); // the E.RFxF channels, normalized to DB TOFChannel erf1fDB = c_erf1f / c_db; tcs.AddChannel(new string[] { "ERF1FDB" }, erf1fDB); TOFChannel erf2fDB = c_erf2f / c_db; tcs.AddChannel(new string[] { "ERF2FDB" }, erf2fDB); // the E.RFxF.DB channels, normalized to DB, again dodgy naming convention. TOFChannel erf1fDBDB = c_edbrf1f / c_db; tcs.AddChannel(new string[] { "ERF1FDBDB" }, erf1fDBDB); TOFChannel erf2fDBDB = c_edbrf2f / c_db; tcs.AddChannel(new string[] { "ERF2FDBDB" }, erf2fDBDB); // the LF1 channel, normalized to DB TOFChannel lf1DB = c_lf1 / c_db; tcs.AddChannel(new string[] { "LF1DB" }, lf1DB); TOFChannel lf1DBDB = c_dblf1 / c_db; tcs.AddChannel(new string[] { "LF1DBDB" }, lf1DBDB); // the LF2 channel, normalized to DB TOFChannel lf2DB = c_lf2 / c_db; tcs.AddChannel(new string[] { "LF2DB" }, lf2DB); TOFChannel lf2DBDB = c_dblf2 / c_db; tcs.AddChannel(new string[] { "LF2DBDB" }, lf2DBDB); TOFChannel bDB = c_b / c_db; tcs.AddChannel(new string[] { "BDB" }, bDB); // we also need to extract the rf-step induced phase shifts. These come out in the // B.RFxF channels, but like the edm, need to be corrected. I'm going to use just the // simplest level of correction for these. 
TOFChannel brf1fCorrDB = (c_brf1f / c_db) - ((c_b * c_dbrf1f) / (c_db * c_db)); tcs.AddChannel(new string[] { "BRF1FCORRDB" }, brf1fCorrDB); TOFChannel brf2fCorrDB = (c_brf2f / c_db) - ((c_b * c_dbrf2f) / (c_db * c_db)); tcs.AddChannel(new string[] { "BRF2FCORRDB" }, brf2fCorrDB); //Some extra channels for various shot noise calculations, these are a bit weird tcs.AddChannel(new string[] { "SIGNL" }, c_sig); tcs.AddChannel(new string[] { "ONEOVERDB" }, 1/c_db); TOFChannel dbSigNL = new TOFChannel(); dbSigNL.On = c_db.On/c_sig.On; dbSigNL.Off = c_db.Off/c_sig.On;; dbSigNL.Difference = c_db.Difference / c_sig.Difference; tcs.AddChannel(new string[] { "DBSIG" }, dbSigNL); TOFChannel dbdbSigSigNL = dbSigNL * dbSigNL; tcs.AddChannel(new string[] { "DBDBSIGSIG" }, dbdbSigSigNL); TOFChannel SigdbdbNL = new TOFChannel(); SigdbdbNL.On = c_sig.On / ( c_db.On* c_db.On); SigdbdbNL.Off = c_sig.On / ( c_db.Off * c_db.Off); SigdbdbNL.Difference = c_sig.Difference / (c_db.Difference * c_db.Difference); tcs.AddChannel(new string[] { "SIGDBDB" }, SigdbdbNL); return tcs; }
private void GetWordsFromBoxAndLogContext()
{
    string[] tempphrases = txtSearhBox.Text.Split(' ');
    List<string> phrases = new List<string>();
    for (int i = 0; i < tempphrases.Length; i++)
    {
        string trimmed = tempphrases[i].Trim();
        if (!String.IsNullOrEmpty(trimmed))
        {
            phrases.Add(trimmed);
        }
    }

    if (phrases.Count > 0)
    {
        phraseLogger_.LogPhrasesToDB(phrases.ToArray());
    }
}
public Recipe[] ReadRecipes(AuthIdentity identity, Guid[] recipeIds, ReadRecipeOptions options)
{
    using (var session = this.GetSession())
    {
        var recipes = session.QueryOver<Recipes>()
            .Fetch(prop => prop.RecipeMetadata).Eager
            .Fetch(prop => prop.Ingredients).Eager
            .Fetch(prop => prop.Ingredients[0].Ingredient).Eager
            .Fetch(prop => prop.Ingredients[0].IngredientForm).Eager
            .AndRestrictionOn(p => p.RecipeId).IsInG(recipeIds)
            .TransformUsing(Transformers.DistinctRootEntity)
            .List();

        if (!recipes.Any())
        {
            throw new RecipeNotFoundException();
        }

        var ret = new List<Recipe>();
        foreach (var dbRecipe in recipes)
        {
            var recipe = new Recipe
            {
                Id = dbRecipe.RecipeId,
                Title = dbRecipe.Title,
                Description = dbRecipe.Description,
                DateEntered = dbRecipe.DateEntered,
                ImageUrl = dbRecipe.ImageUrl,
                ServingSize = dbRecipe.ServingSize,
                PreparationTime = dbRecipe.PrepTime,
                CookTime = dbRecipe.CookTime,
                Credit = dbRecipe.Credit,
                CreditUrl = dbRecipe.CreditUrl,
                AvgRating = dbRecipe.Rating
            };

            if (options.ReturnMethod)
            {
                recipe.Method = dbRecipe.Steps;
            }

            if (options.ReturnUserRating)
            {
                var id = dbRecipe.RecipeId;
                var rating = session.QueryOver<RecipeRatings>()
                    .Where(p => p.Recipe.RecipeId == id)
                    .Where(p => p.UserId == identity.UserId)
                    .SingleOrDefault();

                recipe.UserRating = rating == null ? Rating.None : (Rating)rating.Rating;
            }

            recipe.Ingredients = dbRecipe.Ingredients.Select(i => new IngredientUsage
            {
                Amount = i.Qty.HasValue ? new Amount(i.Qty.Value, i.Unit) : null,
                PreparationNote = i.PrepNote,
                Section = i.Section,
                Form = i.IngredientForm != null ? i.IngredientForm.AsIngredientForm() : null, // Note: Form will be null when usage has no amount
                Ingredient = i.Ingredient.AsIngredient()
            }).ToArray();

            recipe.Tags = dbRecipe.RecipeMetadata.Tags;

            ret.Add(recipe);
        }

        return ret.ToArray();
    }
}
public ShoppingList[] GetShoppingLists(AuthIdentity identity, IList<ShoppingList> lists, GetShoppingListOptions options)
{
    using (var session = this.GetSession())
    {
        var loadDef = true;
        var query = session.QueryOver<ShoppingLists>()
            .Where(p => p.UserId == identity.UserId);

        if (lists != null)
        {
            loadDef = lists.Contains(ShoppingList.Default);
            var ids = lists.Where(l => l.Id.HasValue).Select(l => l.Id.Value).ToArray();
            query = query.AndRestrictionOn(x => x.ShoppingListId).IsInG(ids);
        }

        var dbLists = query.List();
        var ret = new List<ShoppingList>();

        if (loadDef)
        {
            ret.Add(ShoppingList.Default);
        }

        ret.AddRange(dbLists.Select(l => l.AsShoppingList()));

        if (!options.LoadItems)
        {
            return ret.ToArray();
        }

        // Load items into each list
        ICriterion filter = loadDef
            ? Restrictions.Or(Restrictions.IsNull("ShoppingList"), Restrictions.InG("ShoppingList", dbLists)) // Item's list can be null, or must be in the loaded list set
            : Restrictions.InG("ShoppingList", dbLists); // Item's list must be in the loaded list set

        var dbItems = session.QueryOver<ShoppingListItems>()
            .Fetch(prop => prop.Ingredient).Eager
            .Fetch(prop => prop.Recipe).Eager
            .Where(p => p.UserId == identity.UserId)
            .Where(filter)
            .List();

        return ret.Select(m => new ShoppingList(
            m.Id,
            m.Title,
            (m.Id.HasValue
                ? dbItems.Where(f => f.ShoppingList != null && f.ShoppingList.ShoppingListId == m.Id)
                : dbItems.Where(f => f.ShoppingList == null)).Select(r => r.AsShoppingListItem()))).ToArray();
    }
}
public Menu[] GetMenus(AuthIdentity identity, IList<Menu> menus, GetMenuOptions options)
{
    using (var session = this.GetSession())
    {
        // menus will be null if all menus should be loaded, or a list of Menu objects to specify individual menus to load
        if (options == null)
        {
            throw new ArgumentNullException("options");
        }

        if (identity == null)
        {
            throw new ArgumentNullException("identity");
        }

        var loadFav = true;
        var query = session.QueryOver<Menus>()
            .Where(p => p.UserId == identity.UserId);

        if (menus != null)
        {
            loadFav = menus.Contains(Menu.Favorites);
            var ids = menus.Where(m => m.Id.HasValue).Select(m => m.Id.Value).ToArray();
            query = query.AndRestrictionOn(p => p.MenuId).IsInG(ids);
        }

        var dbMenus = query.List();
        var ret = new List<Menu>();

        if (loadFav)
        {
            ret.Add(Menu.Favorites);
        }

        ret.AddRange(dbMenus.Select(m => m.AsMenu()));

        if (!options.LoadRecipes)
        {
            return ret.ToArray();
        }

        // Load recipes into each menu
        ICriterion filter = loadFav
            ? Restrictions.Or(Restrictions.IsNull("Menu"), Restrictions.InG("Menu", dbMenus)) // Menu can be null, or in loaded menu list
            : Restrictions.InG("Menu", dbMenus); // Menu must be in loaded menu list

        var favorites = session.QueryOver<Favorites>()
            .Fetch(prop => prop.Recipe).Eager
            .Where(p => p.UserId == identity.UserId)
            .Where(filter)
            .List();

        return ret.Select(m => new Menu(m)
        {
            Recipes = (m.Id.HasValue
                ? favorites.Where(f => f.Menu != null && f.Menu.MenuId == m.Id)
                : favorites.Where(f => f.Menu == null)).Select(r => r.Recipe.AsRecipeBrief()).ToArray()
        }).ToArray();
    }
}
internal static byte[] ReplaceFilling(byte[] input, byte[] pattern, byte[] replacement)
{
    if (pattern.Length == 0)
    {
        return input;
    }

    var result = new List<byte>();
    int i;
    for (i = 0; i <= input.Length - pattern.Length; i++)
    {
        bool foundMatch = !pattern.Where((t, j) => input[i + j] != t).Any();
        if (foundMatch)
        {
            result.AddRange(replacement);
            // Pad with zero bytes so the output keeps the original length when the replacement is shorter than the pattern.
            for (int k = 0; k < pattern.Length - replacement.Length; k++)
            {
                result.Add(0x00);
            }

            i += pattern.Length - 1;
        }
        else
        {
            result.Add(input[i]);
        }
    }

    // Copy the tail that can no longer contain a full pattern match.
    for (; i < input.Length; i++)
    {
        result.Add(input[i]);
    }

    return result.ToArray();
}
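A small illustrative call to ReplaceFilling showing the zero-padding behaviour when the replacement is shorter than the pattern; the byte values are made up for the example.

// Illustrative only: replace the two-byte pattern { 0xAA, 0xBB } with a single 0xCC,
// zero-padded so the output stays the same length as the input.
byte[] input       = { 0x01, 0xAA, 0xBB, 0x02 };
byte[] pattern     = { 0xAA, 0xBB };
byte[] replacement = { 0xCC };
byte[] output = ReplaceFilling(input, pattern, replacement);
// output is { 0x01, 0xCC, 0x00, 0x02 }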
private void ImgAddClick(object sender, ImageClickEventArgs e)
{
    var selectedItems = new List<ListItem>();
    foreach (ListItem li in lstItems.Items)
    {
        if (li.Selected)
        {
            selectedItems.Add(li);
        }
    }

    //check for single select mode
    if (AvailableSelectionMode == ListSelectionMode.Single && selectedItems.Count > 1)
    {
        //shouldn't ever happen, SelectMode should be Single. BD
        lblMessage.Text = Localization.GetString("ErrorOnlyOne", LocalResourceFile);
    }
    else
    {
        if (AvailableSelectionMode == ListSelectionMode.Single)
        {
            //just replace the current entry if we're in Single Select mode. BD
            lstSelectedItems.Items.Clear();
            lstSelectedItems.Items.AddRange(selectedItems.ToArray());
        }
        else
        {
            //check existing items, don't add again if already inserted.
            foreach (ListItem selectedItem in selectedItems)
            {
                if (!ItemIdExists(selectedItem.Value))
                {
                    //add the selected item
                    lstSelectedItems.Items.Add(new ListItem(selectedItem.Text, selectedItem.Value));
                }
            }
        }
    }
}
private static object[] MapParameters(IRoute route, MethodInfo action)
{
    var expectedMethodParameters = action.GetParameters();
    var argumentsToPass = new List<object>();

    foreach (ParameterInfo param in expectedMethodParameters)
    {
        var currentArgument = route.Parameters[param.Name];
        if (param.ParameterType == typeof(int))
        {
            argumentsToPass.Add(int.Parse(currentArgument));
        }
        else
        {
            argumentsToPass.Add(currentArgument);
        }
    }

    return argumentsToPass.ToArray();
}
string[] GenLines()
{
    var l = new List<string>();
    var sb = new StringBuilder();
    var lines = r.Next( 1000 ) + 500;
    for ( int i = 0 ; i < lines ; i++ )
    {
        sb.Length = 0;
        var chars = r.Next( 100 );
        for ( int c = 0 ; c < chars ; c++ )
            sb.Append( ( ( char ) ( 'a' + r.Next( 26 ) ) ).ToString() );
        l.Add( sb.ToString() );
    }
    return l.ToArray();
}
public static void BBCS(CommandArguments args) { connection = args.GetConnection(); Processors.BBCS.REST.Common.HOST = args.Host; Processors.BBCS.REST.Common.USER = args.User; Processors.BBCS.REST.Common.PWD = args.Password; switch (args.BBCSAction) { case BBCSAction.Ping: Processors.BBCS.SOAP.Ping p = new Processors.BBCS.SOAP.Ping( ( new Processors.BBCS.SOAP.EmailSubmissionService() .proxy(Processors.BBCS.SOAP.EmailSubmissionService.HTTP_SCHEME, Processors.BBCS.SOAP.EmailSubmissionService.PATH_TRANSACTIONAL) )); if (p.Execute()) { Console.WriteLine("SOAP ping: success"); } else { Console.WriteLine("Failure"); } break; case BBCSAction.Bulk: DevLab.UI.BBCS inline = DevLab.UI.UIFactory.GetBBCSUI(); DevLab.UI.ScreenParameters inlineParam = inline.PrintMenu(connection); DataContext.BBCS.RequestFactory.GenerateMergeData = false; RequestParameters pm = new RequestParameters(); pm.MergeDataResourceName = connection.MergeData.FileName; pm.ContentResourceName = connection.Content.FileName; pm.Tag = inlineParam.tag; pm.RecipientCount = inlineParam.recipientCount; pm.GenerateMergedata = false; DataContext.BBCS.GeneralPurpose.Request r = DataContext.BBCS.RequestFactory.GetBulkRequest(pm); r.OrganizationID = connection.OrganizationId.ToString(); Processors.BBCS.REST.Submission.Bulk b = new Processors.BBCS.REST.Submission.Bulk(r, connection.BulkInline(), connection.AuthorizationHeader); b.Submit(); Console.WriteLine("Bulk submit: success"); break; case BBCSAction.BulkChunk: if (args.BBCSProtocol == BBCSProtocol.REST) { DevLab.UI.BBCS chunk = DevLab.UI.UIFactory.GetBBCSUI(); DevLab.UI.ScreenParameters chunkParm = chunk.PrintMenu(connection); RequestFactory.GenerateMergeData = true; Request _cr = RequestFactory.GetBulkCreateRequest(Guid.NewGuid(), chunkParm.tag, chunkParm.recipientCount); AddDataRequest _adr = RequestFactory.GetBulkAddDataRequest(_cr.JobId, chunkParm.tag, chunkParm.recipientCount); ProcessRequest _pr = RequestFactory.GetBulkProcessRequest(_cr.JobId); BulkCreate _bs = new BulkCreate(_cr, connection.BulkCreate(), connection.AuthorizationHeader); _bs.Submit(); chunk.PrintScreen(connection); BulkAddData _ba = new BulkAddData(_adr, connection.BulkAdd(), connection.AuthorizationHeader); _ba.Submit(); chunk.PrintScreen(connection); BulkProcess _br = new BulkProcess(_pr, connection.BulkProcess(), connection.AuthorizationHeader); _br.Submit(); chunk.PrintScreen(connection); Console.WriteLine("Success"); } else { Processors.BBCS.SOAP.GeneralPurpose.Chunk ch = new Processors.BBCS.SOAP.GeneralPurpose.Chunk( new Processors.BBCS.SOAP.EmailSubmissionService() .proxy(Processors.BBCS.SOAP.EmailSubmissionService.HTTPS_SCHEME, Processors.BBCS.SOAP.EmailSubmissionService.PATH_GENERAL_PURPOSE) ); ch.Jobid = Guid.NewGuid(); ch.Execute(); } break; case BBCSAction.BulkPing: break; case BBCSAction.ClientPing: break; case BBCSAction.AddClient: Processors.BBCS.REST.ClientResponse cr = Processors.BBCS.REST.Client.Submit(); Console.WriteLine("Client Id: " + cr.ID.ToString()); break; case BBCSAction.Inline: Processors.BBCS.SOAP.GeneralPurpose.Inline i = new Processors.BBCS.SOAP.GeneralPurpose.Inline( (new Processors.BBCS.SOAP.EmailSubmissionService() .proxy(Processors.BBCS.SOAP.EmailSubmissionService.HTTP_SCHEME, Processors.BBCS.SOAP.EmailSubmissionService.PATH_GENERAL_PURPOSE) )); bool result = i.Execute(); Console.WriteLine("Submitted: " + result.ToString()); break; case BBCSAction.JobStatus: switch (args.BBCSProtocol) { case BBCSProtocol.SOAP: System.Collections.Generic.List<StatusException> exceptions = new 
List<StatusException>(); string exceptionsPath = System.Configuration.ConfigurationManager.AppSettings.Get("ExceptionsList"); try { string path = System.Configuration.ConfigurationManager.AppSettings.Get("ClientListPath"); string[] clients = System.IO.File.ReadAllLines(path); for (int ii = 0; ii < clients.Length; ii++) { if (ii == 0) continue; string client = clients[ii]; string[] columns = client.Split(','); string uname = columns[0]; string pwd = columns[1]; string name = columns[2]; string siteId = columns[3]; try { Processors.BBCS.SOAP.EmailStatusService.UserName = uname; Processors.BBCS.SOAP.EmailStatusService.Password = pwd; var proxy = Processors.BBCS.SOAP.EmailStatusService.proxy(Processors.BBCS.SOAP.EmailStatusService.HTTPS_SCHEME); Processors.BBCS.SOAP.Status.GetJobStatus soapStatus = new Processors.BBCS.SOAP.Status.GetJobStatus(proxy); soapStatus.Execute(); } catch (Exception ex) { exceptions.Add(new StatusException() { SiteId = siteId, Name = name, UserName = uname, Password = pwd, Message = ex.Message }); } } } catch (Exception) { throw; } finally { if (exceptions.Count > 0) { System.Collections.Generic.List<string> list = new List<string>(); list.Add("ServiceUser,ServicePassword,Name,SiteID,ErrorMessage"); foreach (StatusException se in exceptions) { list.Add(se.Serialize()); } System.IO.File.WriteAllLines(exceptionsPath, list.ToArray()); } } break; case BBCSProtocol.REST: Processors.BBCS.REST.Status.GetJobStatus(); break; } Console.WriteLine("Complete"); break; case BBCSAction.RecipientStatus: Processors.BBCS.REST.Status.GetRecipientStatus(); Console.WriteLine("Complete"); break; case BBCSAction.Data: DataSet ds = new DataSet(); string cs = System.Configuration.ConfigurationManager.ConnectionStrings["BBCS"].ConnectionString; string startDate = System.Configuration.ConfigurationManager.AppSettings.Get("startdate"); string endDate = System.Configuration.ConfigurationManager.AppSettings.Get("enddate"); using (SqlConnection con = new SqlConnection(cs)) { using (SqlCommand cmd = con.CreateCommand()) { cmd.Connection.Open(); cmd.CommandText = "select id from dbo.generalpurposeemailjobinfo i where (i.dateadded between @startdate and @enddate) and i.statuscode = 3 ;"; cmd.Parameters.Add(new SqlParameter("@startdate", startDate)); cmd.Parameters.Add(new SqlParameter("@enddate", endDate)); SqlDataAdapter da = new SqlDataAdapter(cmd); da.Fill(ds); } } Console.WriteLine("Jobs found: " + ds.Tables[0].Rows.Count.ToString()); string csvFormat = @"{0},{1},{2},{3},{4}"; if (ds.Tables.Count > 0 && ds.Tables[0].Rows.Count > 0) { Console.WriteLine("Jobs found: " + ds.Tables[0].Rows.Count.ToString()); using (StreamWriter sw = new StreamWriter(@"C:\Users\scott.carnley\Documents\listquality\recipientgroups.csv")) { sw.WriteLine("EmailJobId,Unknown(0),Low(1),Medium(2),High(3)"); foreach (DataRow dr in ds.Tables[0].Rows) { Console.Write("."); Processors.BBCS.MergeData md = new Processors.BBCS.MergeData(); md.ConnectionString = cs; md.EmailJobId = dr[0].ToString(); Processors.BBCS.MergeData.Stats st = md.ProcessMergeData(); sw.WriteLine(string.Format(csvFormat, st.EmailJobId, st.Unknown.ToString(), st.Low.ToString(), st.Medium.ToString(), st.High.ToString())); } } Console.WriteLine("Jobs processed: " + ds.Tables[0].Rows.Count.ToString()); } else { Console.WriteLine("No jobs found."); } //Console.ReadLine(); //Console.WriteLine(); break; case BBCSAction.Transactional: string trx_tag = CCUpdate.Program.WriteMenu(CCUpdate.Program.EMAIL_JOB_BBCS, "Enter tag", true); string trxScreen = 
CCUpdate.Program.GetMenu(CCUpdate.Program.RUN_BBCS); CCUpdate.Program.WriteScreen(string.Format(trxScreen, connection.Host, connection.User, connection.Password, trx_tag, connection.Transactional())); DataContext.BBCS.Transactional.Request trxR = DataContext.BBCS.RequestFactory.GetTrxRequest(); trxR.OrganizationID = connection.OrganizationId.ToString(); Processors.BBCS.REST.Submission.Transactional t = new Processors.BBCS.REST.Submission.Transactional(trxR, connection.Transactional(), connection.AuthorizationHeader); t.Submit(); Console.WriteLine("Complete"); break; case BBCSAction.Attachment: string attScreen = CCUpdate.Program.GetMenu(CCUpdate.Program.RUN_BBCS); CCUpdate.Program.WriteScreen(string.Format(attScreen, connection.Host, connection.User, connection.Password, "N/A", connection.Attachments())); AttachmentService svc = new AttachmentService(connection.Attachments(), connection.AuthorizationHeader); svc.Submit(null); break; case BBCSAction.LoadBalancer: break; } }
protected override void OnOK(object sender, EventArgs args)
{
    if (this.ItemList.Items.Length == 0)
    {
        Context.ClientPage.ClientResponse.Alert(MessageNoItemsSelected);
        return;
    }

    List<MigrationWorker.ItemReference> itemsToProcess = new List<MigrationWorker.ItemReference>();
    foreach (ListviewItem item in this.ItemList.Items)
    {
        string[] textArray = item.Value.Split(new char[] { ':' }, 2);
        ItemUri uri = ItemUri.Parse(textArray[1]);
        if (uri != null)
        {
            itemsToProcess.Add(new MigrationWorker.ItemReference(uri, textArray[0] == "recursive"));
        }
    }

    bool convertToBlob = this.TargetGroup.Value.Equals(ConvertToBlob, StringComparison.InvariantCultureIgnoreCase);
    Job job = MigrationWorker.CreateJob(itemsToProcess.ToArray(), convertToBlob);
    JobManager.Start(job);

    string url = "/sitecore/shell/default.aspx?xmlcontrol=MediaConversionToolWorkingForm&handle=" + job.Handle;
    SheerResponse.SetLocation(url);
}
public static void Cryptography(CommandArguments args) { switch (args.CryptographyAction) { case CryptographyAction.Custom: Processors.Cryptography.AsymetricEncryption asym = new Processors.Cryptography.AsymetricEncryption(); do { Console.Write("Enter value to encrypt: "); string value = Console.ReadLine(); if (value == "quit") break; byte[] bytes = System.Text.Encoding.ASCII.GetBytes(value); byte[] cipher = null; try { cipher = asym.Encrypt(bytes); Console.WriteLine("Encrypted value: " + System.Convert.ToBase64String(cipher)); byte[] decryptedBytes = asym.Decrypt(cipher); string back = System.Text.Encoding.ASCII.GetString(decryptedBytes); Console.WriteLine("Original value: " + back); } catch (Exception e) { Console.WriteLine(e.Message); } finally { Console.WriteLine(); } } while (true); break; case CryptographyAction.RSA: Processors.Cryptography.RSA rsa = new Processors.Cryptography.RSA(); byte[] num = System.BitConverter.GetBytes(30); rsa.GenerateKeys(); byte[] rsaCipher = rsa.Encrypt(num); byte[] plain = rsa.Decrypt(rsaCipher); string base64 = System.Convert.ToBase64String(plain); int result = System.BitConverter.ToInt32(plain, 0); using (System.IO.StreamWriter fs = System.IO.File.CreateText(@"c:\test.txt")) { fs.Write(base64); } break; case CryptographyAction.SMIME: Processors.SMIME.CMS smime = new Processors.SMIME.CMS(); byte[] sessionKey, sessionIV, cmsMessage; using (StreamReader reader = new StreamReader(@"E:\Files\Payments\Managed File Handler\PayflowPro\payflowpro6\test\latenighttest\single.cipher")) { for (int i = 0; i < 5; i++) { reader.ReadLine(); } // S/MIME header data. It's being stripped because it's unecessary. string smimeBase64 = reader.ReadToEnd(); cmsMessage = System.Convert.FromBase64String(smimeBase64); } smime.Certificate = Actions.GetCertificate("pfp"); byte[] carData = smime.UnWrapCMS(cmsMessage, out sessionKey, out sessionIV); byte[] decrypted = smime.Decrypt(carData, sessionKey, sessionIV); List<char> cl = new List<char>(); foreach (byte b in decrypted) { cl.Add((char)b); } Console.WriteLine(cl.ToArray()); //Console.ReadLine(); break; case CryptographyAction.BASE64: string encoded = "eNq9V1tvmzAUTq9a120P28NeEdpjCJCEECKHTutlitakVVpNfasInCR0XDJjcvkZe9rP7WxwgEWpNk0rjmKbz8ffd2xj+1DZqVQqjzSxkqVXuzQb3KwiAn7tzCJWVfgKOHLDoNusKexXFU5jj8QYugHEBFteVbiOR55rf4HVbfgNgu5I1y3N1lqq0WiC0jYOGPm7AmeS3QDZo/h7Xq8NwQ+JG0y43NGd793YU/CtY1o7c8fjz9jy93Z23nAibsf8PWJEh/s0+/EWnSx9T5hzn0W1pogCBHboUOquGJOxpLbEE/P1S7SMOlEiILhOVxyC7c5cCEgkCpQhiLoir3SWtD4lZNaR5cViUVs0aiGeyHVFUeW7/mXq5NrWjxw6HiqEA84eSb5r4zAKx0SyQ79D7aTUSqReCALzAzzwqbQQWD787kpq2elFfJq6IsExZPhlaFse7ZJSpWRUZObB8nY1A45yfBq6Ngi+G1zZdozpoOjU+NZy/RQHozAOHHDErNtW754UL0htcSFvjeB7TJdks2mb2LkoEErSFVk3gukaZurEwhMgA2oWzSybubExNPkv+AfPzN97Zv7+M/Nf1FQt2fZl6Kgl6dRL0mmUpNMsSUcrSadVko5ekk67JB2jrH1a2oFQ1omg/ucjAclPXXNJy/brMWnizuUXt5zd3OsrfhvDRt9UPwk/zMMDGhf9PEAODaEmuJMWlv8v8Qrvk1L8qc9aSJqroonymIbWBe5LEnep2TTjcHGFHcDJlHKTqRWdTq1gAnSi3SACTFh8gs7NBxfP3QA+Lry4Bk6M5HMTDczTMJwBFm4B4xWSBybqmZqiGrpeR3LPRH0TtIaqGZohtUeaIjXbhiONNN2QDNoA9khvgGojuW+i7OYzFSTnDyi7qTJczfF6Ea/neKOIN3Kcndy5gzmEsvM266XleKuIt3JcL+J6jrPzh76iKDsk+DRxS6MwLqVommw/84NaVRSKZ8MtjDfZN+a1hdnCChc0elwbpYOXh+xfWHp54y0091novhHSs0D+8L5vPYSYFm7Aik+x6zlH90OYu8yEfaq8oGmXf7Y88nT8C4qtxZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; byte[] decodedbytes = 
Convert.FromBase64String(encoded); string decompressed = DecompressStatusData(decodedbytes); break; } }
/// <summary>
/// Validates the view model to model mappings.
/// </summary>
/// <exception cref="ModelNotRegisteredException">A property is mapped to a model that does not exist.</exception>
private void ValidateViewModelToModelMappings()
{
    foreach (var viewModelToModelMapping in _viewModelToModelMap)
    {
        var mapping = viewModelToModelMapping.Value;
        if (!IsModelRegistered(mapping.ModelProperty))
        {
            throw Log.ErrorAndCreateException(msg => new ModelNotRegisteredException(mapping.ModelProperty, mapping.ViewModelProperty),
                "There is no model '{0}' registered with the model attribute, so the ViewModelToModel attribute on property '{1}' is invalid",
                mapping.ModelProperty, mapping.ViewModelProperty);
        }

        var viewModelPropertyType = GetPropertyData(mapping.ViewModelProperty).Type;
        var modelPropertyType = GetPropertyData(mapping.ModelProperty).Type;

        var modelPropertyPropertyTypes = new List<Type>(mapping.ValueProperties.Length);
        foreach (var valueProperty in mapping.ValueProperties)
        {
            var modelPropertyPropertyInfo = modelPropertyType.GetPropertyEx(valueProperty);
            if (modelPropertyPropertyInfo == null)
            {
                Log.Warning("Mapped viewmodel property '{0}' to model property '{1}' is invalid because property '{1}' is not found on the model '{2}'.\n\n" +
                            "If the property is defined in a sub-interface, reflection does not return it as a valid property. If this is the case, you can safely ignore this warning",
                    mapping.ViewModelProperty, valueProperty, mapping.ModelProperty);
            }
            else
            {
                modelPropertyPropertyTypes.Add(modelPropertyPropertyInfo.PropertyType);
            }
        }

        if (!mapping.Converter.CanConvert(modelPropertyPropertyTypes.ToArray(), viewModelPropertyType, GetType()))
        {
            Log.Warning("Property '{0}' mapped on model properties '{1}' cannot be converted via given converter '{2}'",
                mapping.ViewModelProperty, string.Join(", ", mapping.ValueProperties), mapping.ConverterType);
        }
    }
}
public override int DeleteInactiveProfiles(ProfileAuthenticationOption authenticationOption, DateTime userInactiveSinceDate)
{
    if (!this.Initialized || this.ReadOnly)
    {
        return 0;
    }

    List<ISalesforceContact> contacts = this.ContactsApi.GetAll(0, int.MaxValue);
    List<string> userNames = new List<string>();
    foreach (var contact in contacts)
    {
        if (contact.LastActivityDate <= userInactiveSinceDate)
        {
            userNames.Add(contact.Login);
        }
    }

    return this.DeleteProfiles(userNames.ToArray());
}
/// <summary>
/// Updates the erection sequence numbers for the given elements, inserting planning rows where none exist.
/// </summary>
/// <param name="factory">The factory.</param>
/// <param name="project">The project.</param>
/// <param name="erectionSequenceList">The erection sequence list (element id, sequence number pairs).</param>
/// <returns>True if at least one row was inserted or updated; otherwise false.</returns>
public bool UpdateErectionSequence( string factory, string project, List<KeyValuePair<int, int>> erectionSequenceList )
{
    project = project.PadLeft( 12 );
    List<string> statementList = new List<string>( erectionSequenceList.Count );

    using( var database = new ImpactDatabase() )
    {
        var allIdArray = erectionSequenceList.Select( x => (object)x.Key ).ToArray();

        ImpactQuery query = new ImpactQuery
        {
            Select = { ImpModelPlanning.ElementId },
            From = { ImpModelPlanning.As( "T1" ) },
            Where =
            {
                ImpModelPlanning.Factory.Equal( factory ),
                ImpModelPlanning.Project.Equal( project ),
                ImpModelPlanning.ElementId.In( allIdArray ),
            },
        };

        string statement = query.ToString();
        var existingPlanningList = database.GetAll( statement, column => column[0].Cast<int>() );

        var groupedByInsertUpdate = erectionSequenceList.GroupBy( x => existingPlanningList.Remove( x.Key ) ).ToList();
        var updateList = groupedByInsertUpdate.Find( x => x.Key );
        var insertList = groupedByInsertUpdate.Find( x => !x.Key );

        if( null != updateList )
        {
            foreach( var item in updateList )
            {
                var update = new ImpactUpdate( ImpModelPlanning.Instance )
                {
                    Columns =
                    {
                        { ImpModelPlanning.ErectionSequenceNo, item.Value }
                    },
                    Where =
                    {
                        ImpModelPlanning.Factory.Equal( factory ),
                        ImpModelPlanning.Project.Equal( project ),
                        ImpModelPlanning.ElementId.Equal( item.Key ),
                    }
                };
                statementList.Add( update.ToString() );
            }
        }

        if( null != insertList )
        {
            foreach( var item in insertList )
            {
                var insert = new ImpactInsert( ImpModelPlanning.Instance )
                {
                    Columns =
                    {
                        { ImpModelPlanning.Factory, factory },
                        { ImpModelPlanning.Project, project },
                        { ImpModelPlanning.ElementId, item.Key },
                        { ImpModelPlanning.ErectionSequenceNo, item.Value },
                    },
                };
                statementList.Add( insert.ToString() );
            }
        }

        int result = database.ExecuteNonQuery( statementList.ToArray() );
        return result > 0;
    }
}
/// <summary>
/// Generates the URL for the listing.
/// </summary>
/// <param name="pageIndex">Index of the page.</param>
/// <returns>A URL to the given page of this list</returns>
private string GenerateUrlForPage(int pageIndex)
{
    List<string> parameters = new List<string>();
    parameters.Add("PageIndex=" + pageIndex.ToString(CultureInfo.InvariantCulture));
    this.AddParameter(parameters, "Address");
    this.AddParameter(parameters, "City");
    this.AddParameter(parameters, "Region");
    this.AddParameter(parameters, "Zip");
    this.AddParameter(parameters, "Country");
    this.AddParameter(parameters, "FilterCountry");
    this.AddParameter(parameters, "Distance");
    this.AddParameter(parameters, "All");

    return Globals.NavigateURL(this.TabId, string.Empty, parameters.ToArray());
}
// ReSharper restore SuggestBaseTypeForParameter
// ReSharper disable SuggestBaseTypeForParameter
private static void SetPagingLink(NameValueCollection queryString, HyperLink link, bool showLink, int linkedPageId, int tabId)
{
    if (showLink)
    {
        link.Visible = true;
        queryString = new NameValueCollection(queryString);
        queryString["catpageid"] = linkedPageId.ToString(CultureInfo.InvariantCulture);

        var additionalParameters = new List<string>(queryString.Count);
        for (int i = 0; i < queryString.Count; i++)
        {
            if (string.Equals(queryString.GetKey(i), "TABID", StringComparison.OrdinalIgnoreCase))
            {
                int newTabId;
                if (int.TryParse(queryString[i], NumberStyles.Integer, CultureInfo.InvariantCulture, out newTabId))
                {
                    tabId = newTabId;
                }
            }
            else if (!string.Equals(queryString.GetKey(i), "LANGUAGE", StringComparison.OrdinalIgnoreCase))
            {
                additionalParameters.Add(queryString.GetKey(i) + "=" + queryString[i]);
            }
        }

        link.NavigateUrl = Globals.NavigateURL(tabId, string.Empty, additionalParameters.ToArray());
    }
    else
    {
        link.Visible = false;
    }
}
public static string[] GetStringArrayFromSensiumResultSetList(List<SensiumResultPhrase> resultlist)
{
    List<string> toreturn = new List<string>();
    foreach (SensiumResultPhrase sensphrase in resultlist)
    {
        if (sensphrase != null)
        {
            toreturn.Add(sensphrase.Text);
        }
    }
    return toreturn.ToArray();
}