public void TestSqlContextJsonFile()
{
    // Loading a JSON file should forward the path as the first proxy argument.
    var context = new SqlContext(new SparkContext("", ""));
    var frame = context.JsonFile(@"c:\path\to\input.json");

    var capturedArgs = (frame.DataFrameProxy as MockDataFrameProxy).mockDataFrameReference as object[];
    Assert.AreEqual(@"c:\path\to\input.json", capturedArgs[0]);
}
/// <summary>
/// Persists the control layout for one control place on a page: deletes every
/// existing control record for the place, then re-inserts the supplied set,
/// all inside a single transaction. Does nothing when no connection string is set.
/// </summary>
public void Save(PageView currentPage, string controlPlaceId, ControlData[] data)
{
    if (_connectionString == string.Empty)
    {
        return; // no connection configured - nothing to persist
    }

    SqlContext context = new SqlContext();
    context.ConnectionString = _connectionString;
    try
    {
        context.BeginTransaction();

        // Wipe every control currently stored for this control place.
        SqlHelper.ExecuteNonQuery(context, CommandType.StoredProcedure,
            "page_ControlPropertiesDeleteAllInControlPlace",
            new SqlParameter("@PageUID", currentPage.Id),
            new SqlParameter("@ControlPlaceID", controlPlaceId));

        // Re-insert in array order; the loop index doubles as the control's sort position.
        for (int position = 0; position < data.Length; position++)
        {
            SqlHelper.ExecuteNonQuery(context, CommandType.StoredProcedure,
                "page_ControlPropertiesInsertInControlPlace",
                new SqlParameter("@PageUID", currentPage.Id),
                new SqlParameter("@ControlPlaceID", controlPlaceId),
                new SqlParameter("@ControlIndex", position),
                new SqlParameter("@ControlUID", data[position].ControlUID),
                new SqlParameter("@ControlProperties", data[position].Settings));
        }

        context.Commit();
    }
    catch
    {
        // Undo partial writes, then let the caller see the original failure.
        context.Rollback();
        throw;
    }
}
/// <summary>
/// Example: reads book records from XML via spark-xml, logs the row count,
/// then writes a two-column projection back out as XML.
/// args[0] = input path, args[1] = output path
/// (local path, "file:/..." or "hdfs://..." forms all work).
/// </summary>
static void Main(string[] args)
{
    LoggerServiceFactory.SetLoggerService(Log4NetLoggerService.Instance); // optional - DefaultLoggerService is used if not set
    var logger = LoggerServiceFactory.GetLogger(typeof(SparkXmlExample));

    var inputXmlFilePath = args[0];
    var outputXmlFilePath = args[1];

    var sparkConf = new SparkConf();
    sparkConf.SetAppName("myapp");
    var sparkContext = new SparkContext(sparkConf);
    var sqlContext = new SqlContext(sparkContext);

    // Each <book> element becomes one row.
    var df = sqlContext.Read()
        .Format("com.databricks.spark.xml")
        .Option("rowTag", "book")
        .Load(inputXmlFilePath);
    df.ShowSchema();
    var rowCount = df.Count();
    logger.LogInfo("Row count is " + rowCount);

    // Project two columns ("@id" is the XML attribute) and save as XML again.
    var selectedData = df.Select("author", "@id");
    selectedData.Write()
        .Format("com.databricks.spark.xml")
        .Option("rootTag", "books")
        .Option("rowTag", "book")
        .Save(outputXmlFilePath);

    sparkContext.Stop();
}
// Builds a SqlContext describing this compound comparison: parses every child
// comparison, then records the boolean connective ("and"/"or") between them in
// rslt.RelationTypes. Returns null when there are no child comparisons at all.
public override SqlContext Parse(SqlContext ctx) {
    if (Compare_complexs == null || Compare_complexs.Count == 0) return null;
    var rslt = new SqlContext();
    foreach (var cc in Compare_complexs) {
        // NOTE(review): the parsed child result is never merged into rslt - the Join
        // below is commented out, so 'temp' is dead unless cc.Parse(ctx) has side
        // effects. Confirm whether disabling the join was intentional.
        var temp = cc.Parse(ctx);
        //rslt = rslt.Join(temp);
    }
    for (int index = 0; index < Relations.Count; index++) {
        var relation = Relations[index];
        RelationType relationType = RelationType.Unknown;
        // Unrecognised connectives fall through and are recorded as Unknown.
        switch (relation.ToLower().Trim()) {
            case "and": relationType = RelationType.And; break;
            case "or": relationType = RelationType.Or; break;
            default: break;
        }
        rslt.RelationTypes.Add(relationType);
    }
    return rslt;
}
/// <summary>
/// Creates the shared SparkContext/SqlContext for the perf suite.
/// args[0] supplies the spark.local.dir scratch directory.
/// </summary>
private static void InitializeSparkContext(string[] args)
{
    var conf = new SparkConf();
    conf.Set("spark.local.dir", args[0]);
    conf.SetAppName("SparkCLR perf suite - C#");

    SparkContext = new SparkContext(conf);
    SqlContext = new SqlContext(PerfBenchmark.SparkContext);
}
public void TestDataFrameJoin()
{
    // Join should capture the right-hand frame and the join column in the proxy call.
    var context = new SqlContext(new SparkContext("", ""));
    var left = context.JsonFile(@"c:\path\to\input.json");
    var right = context.JsonFile(@"c:\path\to\input2.json");

    var joined = left.Join(right, "JoinCol");

    var joinArgs = (joined.DataFrameProxy as MockDataFrameProxy).mockDataFrameReference as object[];
    var rightFrameArgs = (joinArgs[0] as MockDataFrameProxy).mockDataFrameReference as object[];
    Assert.AreEqual(@"c:\path\to\input2.json", rightFrameArgs[0]);
    Assert.AreEqual("JoinCol", joinArgs[1]);
}
public void TestSqlContextNewSession()
{
    // arrange: the proxy hands back a known session proxy
    var expectedSessionProxy = new SqlContextIpcProxy(new JvmObjectReference("1"));
    mockSqlContextProxy.Setup(m => m.NewSession()).Returns(expectedSessionProxy);
    var sqlContext = new SqlContext(new SparkContext("", ""), mockSqlContextProxy.Object);

    // act
    var newSession = sqlContext.NewSession();

    // assert: the new SqlContext wraps exactly that proxy
    Assert.AreEqual(expectedSessionProxy, newSession.SqlContextProxy);
}
public void TestSqlContextGetConf()
{
    // arrange
    const string key = "key";
    const string value = "value";
    mockSqlContextProxy.Setup(m => m.GetConf(key, "")).Returns(value);
    var sqlContext = new SqlContext(new SparkContext("", ""), mockSqlContextProxy.Object);

    // act: look up the key with an empty default
    var result = sqlContext.GetConf(key, "");

    // assert
    Assert.AreEqual(value, result);
}
/// <summary>
/// Example: loads table "xyz" from a local SQL Server over JDBC, prints its
/// schema and logs the row count.
/// </summary>
static void Main(string[] args)
{
    LoggerServiceFactory.SetLoggerService(Log4NetLoggerService.Instance); // optional - DefaultLoggerService is used if not set
    var logger = LoggerServiceFactory.GetLogger(typeof(JdbcDataFrameExample));

    var sparkConf = new SparkConf();
    var sparkContext = new SparkContext(sparkConf);
    var sqlContext = new SqlContext(sparkContext);

    // Fixed: the connection string contained a stray double semicolon
    // (";;integratedSecurity") which is malformed.
    var df = sqlContext.Read()
        .Jdbc("jdbc:sqlserver://localhost:1433;databaseName=Temp;integratedSecurity=true;",
            "xyz", new Dictionary<string, string>());
    df.ShowSchema();

    var rowCount = df.Count();
    logger.LogInfo("Row count is " + rowCount);

    // Stop the context so the example terminates cleanly
    // (consistent with the other example Mains in this codebase).
    sparkContext.Stop();
}
/// <summary>
/// Sets up a Roslyn C# script engine bound to the given Spark/SQL contexts:
/// builds the script host, creates a fresh per-session dump directory for
/// compiled submissions under spark.local.dir (or the OS temp path), and
/// configures C# 6 script-mode parsing.
/// </summary>
public RoslynScriptEngine(SparkContext sc, SqlContext sqlContext)
{
    this.sc = sc;
    sparkConf = sc.GetConf();

    host = new SparkCLRHost
    {
        sc = sc,
        sqlContext = sqlContext
    };

    // Compiled REPL submissions get dumped into a random subdirectory.
    var dumpRoot = sparkConf.Get("spark.local.dir", Path.GetTempPath());
    compilationDumpDirectory = Path.Combine(dumpRoot, Path.GetRandomFileName());
    Directory.CreateDirectory(compilationDumpDirectory);

    options = new CSharpParseOptions(LanguageVersion.CSharp6, DocumentationMode.Parse, SourceCodeKind.Script);
}
public void TestSqlContextTextFile()
{
    // Defaults: comma delimiter, both boolean options off.
    var context = new SqlContext(new SparkContext("", ""));
    var frame = context.TextFile(@"c:\path\to\input.txt");
    var captured = (frame.DataFrameProxy as MockDataFrameProxy).mockDataFrameReference as object[];
    Assert.AreEqual(@"c:\path\to\input.txt", captured[0]);
    Assert.AreEqual(@",", captured[1]);
    Assert.IsFalse(bool.Parse(captured[2].ToString()));
    Assert.IsFalse(bool.Parse(captured[3].ToString()));

    // Explicit delimiter with both flags enabled.
    context = new SqlContext(new SparkContext("", ""));
    frame = context.TextFile(@"c:\path\to\input.txt", "|", true, true);
    captured = (frame.DataFrameProxy as MockDataFrameProxy).mockDataFrameReference as object[];
    Assert.AreEqual(@"c:\path\to\input.txt", captured[0]);
    Assert.AreEqual(@"|", captured[1]);
    Assert.IsTrue(bool.Parse(captured[2].ToString()));
    Assert.IsTrue(bool.Parse(captured[3].ToString()));
}
/// <summary>
/// Example: loads a table over JDBC and logs its row count.
/// args[0] = JDBC connection string, args[1] = table name.
/// </summary>
static void Main(string[] args)
{
    LoggerServiceFactory.SetLoggerService(Log4NetLoggerService.Instance); // optional - DefaultLoggerService is used if not set
    var logger = LoggerServiceFactory.GetLogger(typeof(JdbcDataFrameExample));

    // For SQL Server use connection strings of the form:
    //   "jdbc:sqlserver://localhost:1433;databaseName=Temp;integratedSecurity=true;"
    //   "jdbc:sqlserver://localhost;databaseName=Temp;user=MyUserName;password=myPassword;"
    var connectionString = args[0];
    var tableName = args[1];

    var sparkConf = new SparkConf();
    var sparkContext = new SparkContext(sparkConf);
    var sqlContext = new SqlContext(sparkContext);

    var df = sqlContext
        .Read()
        .Jdbc(connectionString, tableName, new Dictionary<string, string>());
    df.ShowSchema();

    var rowCount = df.Count();
    logger.LogInfo("Row count is " + rowCount);

    sparkContext.Stop();
}
/// <summary>
/// Reads every control stored for the given control place on a page and returns
/// them as a ControlData array (empty when no connection string is configured).
/// </summary>
public ControlData[] Load(PageView currentPage, string controlPlaceId)
{
    ArrayList loaded = new ArrayList();
    if (_connectionString != string.Empty)
    {
        SqlContext context = new SqlContext();
        // NOTE(review): the guard checks _connectionString but this line reads
        // this.ConnectionString - presumably the same value; confirm.
        context.ConnectionString = this.ConnectionString;
        using (IDataReader reader = SqlHelper.ExecuteReader(context, CommandType.StoredProcedure,
            "page_ControlPropertiesSelectAllInControlPlace",
            new SqlParameter("@PageUID", currentPage.Id),
            new SqlParameter("@ControlPlaceID", controlPlaceId)))
        {
            // Columns 1 and 2 feed the ControlData constructor
            // (presumably control UID and serialized settings - verify against the proc).
            while (reader.Read())
            {
                loaded.Add(new ControlData(reader.GetString(1), reader.GetString(2)));
            }
        }
    }
    return (ControlData[])loaded.ToArray(typeof(ControlData));
}
/// <summary>Builds an INSERT query for the supplied entity on the given context.</summary>
public SqlInsertEntityQuery(SqlContext context, T entity)
    : base(context)
{
    AddInsertEntity(entity);
}
/// <summary>
/// Constructor taking the SQL context.
/// </summary>
/// <param name="sqlContext">SQL context</param>
public RepositorioLivro(SqlContext sqlContext)
    : base(sqlContext)
{
    _sqlContext = sqlContext;
}
/// <summary>Creates the repository over the given SQL context.</summary>
public PersonRepository(SqlContext context)
    : base(context)
{
    _context = context;
}
/// <summary>Creates the repository over the given repository context.</summary>
public EnderecoTipoRepository(SqlContext repositoryContext)
    : base(repositoryContext)
{
}
/// <summary>Lazily creates and caches the shared SqlContext for the samples.</summary>
private static SqlContext GetSqlContext()
{
    if (sqlContext == null)
    {
        sqlContext = new SqlContext(SparkCLRSamples.SparkContext);
    }
    return sqlContext;
}
/// <summary>Creates the repository over the given SQL context.</summary>
public ProductRepository(SqlContext Context)
    : base(Context)
{
    _context = Context;
}
/// <summary>
/// Cassandra example: reads a users table via the Spark-Cassandra connector,
/// filters it through a temp table, writes the result to a second table, then
/// repeats the projection/filter via the RDD API and prints the matches.
/// Optional args: host, keyspace, source table, target table.
/// </summary>
static void Main(string[] args)
{
    // Defaults target a local Cassandra instance; all four can be overridden.
    var cassandraHostName = "localhost";
    var cassandraKeySpace = "ks";
    var cassandraTableToRead = "users";
    var cassandraTableToInsert = "filteredusers";
    if (args.Length == 4)
    {
        cassandraHostName = args[0];
        cassandraKeySpace = args[1];
        cassandraTableToRead = args[2];
        cassandraTableToInsert = args[3];
    }

    /*
        ** CQL used to create tables in Cassandra for this example **
        CREATE TABLE users (username VARCHAR, firstname VARCHAR, lastname VARCHAR, PRIMARY KEY (username));
        INSERT INTO ks.users (username, firstname, lastname) VALUES ('JD123', 'John', 'Doe');
        INSERT INTO ks.users (username, firstname, lastname) VALUES ('BillJ', 'Bill', 'Jones');
        INSERT INTO ks.users (username, firstname, lastname) VALUES ('SL', 'Steve', 'Little');
        CREATE TABLE filteredusers (username VARCHAR, firstname VARCHAR, lastname VARCHAR, PRIMARY KEY (username));
    */

    var sparkConf = new SparkConf().Set("spark.cassandra.connection.host", cassandraHostName);
    var sparkContext = new SparkContext(sparkConf);
    var sqlContext = new SqlContext(sparkContext);

    // Read the source table through the Cassandra data source and display it.
    var usersDataFrame = sqlContext.Read()
        .Format("org.apache.spark.sql.cassandra")
        .Options(new Dictionary<string, string> { { "keyspace", cassandraKeySpace }, { "table", cassandraTableToRead } })
        .Load();
    usersDataFrame.Show();

    // Expose the same table as a temp table so it can be filtered with SQL.
    var tempTableCql = string.Format(
        "CREATE TEMPORARY TABLE userstemp USING org.apache.spark.sql.cassandra OPTIONS(table \"{0}\", keyspace \"{1}\")",
        cassandraTableToRead,
        cassandraKeySpace);
    sqlContext.Sql(tempTableCql);

    var filteredUsersDataFrame = sqlContext.Sql("SELECT * FROM userstemp").Filter("username = '******'");
    filteredUsersDataFrame.ShowSchema();
    filteredUsersDataFrame.Show();

    // Persist the filtered rows into the target Cassandra table.
    filteredUsersDataFrame.Write()
        .Format("org.apache.spark.sql.cassandra")
        .Options(new Dictionary<string, string> { { "keyspace", cassandraKeySpace }, { "table", cassandraTableToInsert } })
        .Save();

    // Same data via the RDD API: project to "username,firstname,lastname"
    // and keep only rows containing "SL".
    var collectedItems = usersDataFrame.ToRDD()
        .Map(r => string.Format("{0},{1},{2}",
            r.GetAs<string>("username"), r.GetAs<string>("firstname"), r.GetAs<string>("lastname")))
        .Filter(s => s.Contains("SL"))
        .Collect();
    foreach (var item in collectedItems)
    {
        Console.WriteLine(item);
    }

    Console.WriteLine("Completed running example");
}
public void TestSqlContextTables()
{
    // arrange: both Tables() overloads return the same known proxy
    var sqlContext = new SqlContext(new SparkContext("", ""), mockSqlContextProxy.Object);
    var dataFrameProxy = new DataFrameIpcProxy(new JvmObjectReference("1"), mockSqlContextProxy.Object);
    mockSqlContextProxy.Setup(m => m.Tables()).Returns(dataFrameProxy);
    mockSqlContextProxy.Setup(m => m.Tables(It.IsAny<string>())).Returns(dataFrameProxy);

    // act + assert: no database argument
    var tablesFrame = sqlContext.Tables();
    Assert.AreEqual(dataFrameProxy, tablesFrame.DataFrameProxy);

    // act + assert: explicit database name
    tablesFrame = sqlContext.Tables("db");
    Assert.AreEqual(dataFrameProxy, tablesFrame.DataFrameProxy);
}
public void TestSqlContextTextFile()
{
    // Defaults: comma delimiter, both boolean options off.
    var context = new SqlContext(new SparkContext("", ""));
    var frame = context.TextFile(@"c:\path\to\input.txt");
    var captured = (frame.DataFrameProxy as MockDataFrameProxy).mockDataFrameReference;
    Assert.AreEqual(@"c:\path\to\input.txt", captured[0]);
    Assert.AreEqual(@",", captured[1]);
    Assert.IsFalse(bool.Parse(captured[2].ToString()));
    Assert.IsFalse(bool.Parse(captured[3].ToString()));

    // Explicit delimiter with both flags enabled.
    context = new SqlContext(new SparkContext("", ""));
    frame = context.TextFile(@"c:\path\to\input.txt", "|", true, true);
    captured = (frame.DataFrameProxy as MockDataFrameProxy).mockDataFrameReference;
    Assert.AreEqual(@"c:\path\to\input.txt", captured[0]);
    Assert.AreEqual(@"|", captured[1]);
    Assert.IsTrue(bool.Parse(captured[2].ToString()));
    Assert.IsTrue(bool.Parse(captured[3].ToString()));

    // Caller-supplied schema: schema is forwarded, delimiter falls back to comma.
    context = new SqlContext(new SparkContext("", ""));
    var structTypeProxy = new Mock<IStructTypeProxy>();
    structTypeProxy.Setup(m => m.ToJson()).Returns(RowHelper.BasicJsonSchema);
    var schema = new StructType(structTypeProxy.Object);
    frame = context.TextFile(@"c:\path\to\input.txt", schema);
    captured = (frame.DataFrameProxy as MockDataFrameProxy).mockDataFrameReference;
    Assert.AreEqual(@"c:\path\to\input.txt", captured[0]);
    Assert.AreEqual(schema, captured[1]);
    Assert.AreEqual(@",", captured[2]);
}
public void TestSqlContextSetConf()
{
    // arrange
    const string key = "key";
    const string value = "value";
    mockSqlContextProxy.Setup(m => m.SetConf(key, value));
    var sqlContext = new SqlContext(new SparkContext("", ""), mockSqlContextProxy.Object);

    // act
    sqlContext.SetConf(key, value);

    // assert: the pair is forwarded verbatim to the proxy
    mockSqlContextProxy.Verify(m => m.SetConf(key, value));
}
public void TestSqlContextClearCache()
{
    // arrange
    mockSqlContextProxy.Setup(m => m.ClearCache());
    var sqlContext = new SqlContext(new SparkContext("", ""), mockSqlContextProxy.Object);

    // act
    sqlContext.ClearCache();

    // assert: the call is delegated to the proxy
    mockSqlContextProxy.Verify(m => m.ClearCache());
}
public void TestSqlContextUncacheTable()
{
    // arrange
    mockSqlContextProxy.Setup(m => m.UncacheTable(It.IsAny<string>()));
    var sqlContext = new SqlContext(new SparkContext("", ""), mockSqlContextProxy.Object);

    // act
    sqlContext.UncacheTable("table");

    // assert: the table name reaches the proxy unchanged
    mockSqlContextProxy.Verify(m => m.UncacheTable("table"));
}
public void TestSqlContextTableNames()
{
    // arrange: both TableNames() overloads return the same fixed list
    string[] tableNames = new string[] { "table1", "table2" };
    var sqlContext = new SqlContext(new SparkContext("", ""), mockSqlContextProxy.Object);
    mockSqlContextProxy.Setup(m => m.TableNames()).Returns(tableNames);
    mockSqlContextProxy.Setup(m => m.TableNames(It.IsAny<string>())).Returns(tableNames);

    // act + assert: no database argument
    var names = sqlContext.TableNames();
    Assert.AreEqual(tableNames, names);

    // act + assert: explicit database name
    names = sqlContext.TableNames("db");
    Assert.AreEqual(tableNames, names);
}
/// <summary>Creates the client repository over the given SQL context.</summary>
public RepositoryCliente(SqlContext sqlContext)
    : base(sqlContext)
{
    this.sqlContext = sqlContext;
}
/// <summary>Lazily creates and caches the shared SqlContext.</summary>
private static SqlContext GetSqlContext()
{
    if (_sqlContext == null)
    {
        _sqlContext = new SqlContext(_sparkContext);
    }
    return _sqlContext;
}
/// <summary>Creates the controller with its database context.</summary>
public BlogPostController(SqlContext context)
{
    _db = context;
}
public void TestSqlContextSql()
{
    // Sql() should hand the query text straight through to the proxy.
    var context = new SqlContext(new SparkContext("", ""));
    var frame = context.Sql("Query of SQL text");

    var captured = (frame.DataFrameProxy as MockDataFrameProxy).mockDataFrameReference;
    Assert.AreEqual("Query of SQL text", captured[0]);
}
/// <summary>Creates the repository over the given SQL context.</summary>
public InterestRepository(SqlContext context)
    : base(context)
{
}
public void TestSqlContextConstructor()
{
    // Constructing a SqlContext should produce a mock proxy that
    // holds a reference back to this context.
    var context = new SqlContext(new SparkContext("", ""));
    Assert.IsNotNull((context.SqlContextProxy as MockSqlContextProxy).mockSqlContextReference);
}
/// <summary>Creates the unit of work over the given SQL context.</summary>
public UnitOfWork(SqlContext sqlContext)
{
    _sqlContext = sqlContext;
}
public void TestSqlContextCreateDataFrame()
{
    // arrange: an RDD backed by mocked proxies, plus a proxy that
    // returns a known DataFrame proxy from CreateDataFrame
    var mockSparkContextProxy = new Mock<ISparkContextProxy>();
    mockSparkContextProxy.Setup(m => m.CreateCSharpRdd(It.IsAny<IRDDProxy>(), It.IsAny<byte[]>(),
        It.IsAny<Dictionary<string, string>>(), It.IsAny<List<string>>(), It.IsAny<bool>(),
        It.IsAny<List<Broadcast>>(), It.IsAny<List<byte[]>>()));
    var rddProxy = new Mock<IRDDProxy>();
    var rdd = new RDD<object[]>(rddProxy.Object, new SparkContext(mockSparkContextProxy.Object, new SparkConf()));
    var dataFrameProxy = new DataFrameIpcProxy(new JvmObjectReference("1"), mockSqlContextProxy.Object);
    mockSqlContextProxy.Setup(m => m.CreateDataFrame(It.IsAny<IRDDProxy>(), It.IsAny<IStructTypeProxy>()))
        .Returns(dataFrameProxy);
    var sqlContext = new SqlContext(new SparkContext("", ""), mockSqlContextProxy.Object);
    var structTypeProxy = new Mock<IStructTypeProxy>();
    structTypeProxy.Setup(m => m.ToJson()).Returns(RowHelper.ComplexJsonSchema);

    // act
    var dataFrame = sqlContext.CreateDataFrame(rdd, new StructType(structTypeProxy.Object));

    // assert
    Assert.AreEqual(dataFrameProxy, dataFrame.DataFrameProxy);
}
/// <summary>Creates the user repository over the given SQL context.</summary>
public RepositoryUsuario(SqlContext Context)
    : base(Context)
{
    _context = Context;
}
/// <summary>Creates the repository over the given SQL context.</summary>
public PersonRepository(SqlContext sqlContext)
{
    this.sqlContext = sqlContext;
}
/// <summary>Base constructor: stores the SQL context for derived repositories.</summary>
protected BaseRepository(SqlContext ctx)
{
    this.ctx = ctx;
}
/// <summary>Creates the product repository over the given SQL context.</summary>
public ProdutoRepository(SqlContext sqlContext)
    : base(sqlContext)
{
    this.sqlContext = sqlContext;
}
/// <summary>Renders this sort term as SQL: "&lt;column&gt; ASC" or "&lt;column&gt; DESC".</summary>
public string ToSql(SqlContext context)
{
    SqlColumn column = context.GetColumn(Expression);
    string direction = Direction == SqlSortDirection.Descending ? "DESC" : "ASC";
    return $"{column.ToSql(context)} {direction}";
}
/// <summary>
/// Creates the controller with its database context.
/// NOTE(review): the injected <paramref name="iMapper"/> is never stored here -
/// confirm whether it is still needed.
/// </summary>
public TopicController(SqlContext context, IMapper iMapper)
{
    _context = context;
}
// End-to-end REPL smoke test: starts a Repl on a foreground thread with a scripted
// TestIoHandler, feeds a fixed input sequence (blank line, a code block split over two
// lines, code that constructs an exception, code that fails to compile, :load of a
// missing and of a real assembly, malformed and unknown directives, :help, :quit),
// then asserts the recorded prompt/output sequence in order.
// NOTE(review): the expected substrings "Failed to load assebmly" / "Loaded assebmly"
// mirror a typo in the REPL's own messages - they must stay in sync with it.
// NOTE(review): the 'Assert.AreEqual(". ' literal below appears mangled (a newline fell
// inside a non-verbatim string) - verify against the original test source.
public void Test() { var sc = new SparkContext("", ""); var sqlContext = new SqlContext(sc); var scriptEngine = new RoslynScriptEngine(sc, sqlContext); var ioHandler = new TestIoHandler(); var repl = new Repl(scriptEngine, ioHandler); repl.Init(); var thread = new Thread(() => { repl.Run();}) { IsBackground = false }; thread.Start(); Thread.Sleep(1000); Assert.IsTrue(ioHandler.output.Any()); Assert.AreEqual("> ", ioHandler.output.Last()); ioHandler.output.Clear(); // empty input ioHandler.input.Add(" "); // incomplete code block ioHandler.input.Add("if (true) {"); ioHandler.input.Add("return 1024; }"); // execution exception ioHandler.input.Add("new Exception(\"Test\")"); // compile exception ioHandler.input.Add("var a=;"); // load non-exist library ioHandler.input.Add(":load \"non-exist.dll\""); // load library var sampleDLL = scriptEngine.CompilationDumpPath(0); ioHandler.input.Add(":load \"" + sampleDLL + "\""); // invalid :load directive ioHandler.input.Add(":load x"); // invalid directive ioHandler.input.Add(":invalid directive"); // :help directive ioHandler.input.Add(":help"); // quit REPL ioHandler.input.Add(":quit"); thread.Join(); scriptEngine.Close(); Console.WriteLine(string.Join("\r\n", ioHandler.output)); var seq = 0; Assert.AreEqual("> ", ioHandler.output[seq++]); Assert.AreEqual(". 
", ioHandler.output[seq++]); Assert.AreEqual("1024", ioHandler.output[seq++]); Assert.AreEqual("> ", ioHandler.output[seq++]); // execution exception Assert.IsTrue(ioHandler.output[seq++].Contains("System.Exception: Test")); Assert.AreEqual("> ", ioHandler.output[seq++]); // compile exception Assert.IsTrue(ioHandler.output[seq++].Contains("Exception")); Assert.AreEqual("> ", ioHandler.output[seq++]); // load non-exist library Assert.IsTrue(ioHandler.output[seq++].Contains("Failed to load assebmly")); Assert.AreEqual("> ", ioHandler.output[seq++]); // load library Assert.IsTrue(ioHandler.output[seq++].Contains("Loaded assebmly")); Assert.AreEqual("> ", ioHandler.output[seq++]); // invalid :load directive Assert.IsTrue(ioHandler.output[seq++].Contains("Invalid :load directive")); Assert.AreEqual("> ", ioHandler.output[seq++]); // invalid directive Assert.IsTrue(ioHandler.output[seq++].Contains("Invalid directive")); Assert.AreEqual("> ", ioHandler.output[seq++]); // help directive Assert.IsTrue(ioHandler.output[seq++].Contains("Commands")); Assert.AreEqual("> ", ioHandler.output[seq++]); }
/// <summary>Creates the controller with its database context and authentication provider.</summary>
public AuthController(SqlContext ctx, IAuthenticationProvider auth)
{
    _auth = auth;
    _ctx = ctx;
}
/// <summary>Creates the state repository over the given SQL context.</summary>
public RepositoryState(SqlContext sqlContext)
    : base(sqlContext)
{
    _sqlContext = sqlContext;
}
/// <summary>Creates the controller with its database context.</summary>
public JobMatchesController(SqlContext dbContext)
{
    _dbContext = dbContext;
}
public void TestSqlContextCreateDataFrame()
{
    // arrange: an RDD backed by mocked proxies, plus a proxy that
    // returns a known DataFrame proxy from CreateDataFrame
    var mockSparkContextProxy = new Mock<ISparkContextProxy>();
    mockSparkContextProxy.Setup(m => m.CreateCSharpRdd(It.IsAny<IRDDProxy>(), It.IsAny<byte[]>(),
        It.IsAny<Dictionary<string, string>>(), It.IsAny<List<string>>(), It.IsAny<bool>(),
        It.IsAny<List<Broadcast>>(), It.IsAny<List<byte[]>>()));
    var rddProxy = new Mock<IRDDProxy>();
    var rdd = new RDD<object[]>(rddProxy.Object, new SparkContext(mockSparkContextProxy.Object, new SparkConf()));
    var dataFrameProxy = new DataFrameIpcProxy(new JvmObjectReference("1"), mockSqlContextProxy.Object);
    mockSqlContextProxy.Setup(m => m.CreateDataFrame(It.IsAny<IRDDProxy>(), It.IsAny<IStructTypeProxy>()))
        .Returns(dataFrameProxy);
    var sqlContext = new SqlContext(new SparkContext("", ""), mockSqlContextProxy.Object);

    // A single non-nullable string field named "guid".
    var structTypeProxy = new Mock<IStructTypeProxy>();
    const string schemaJson = @"{
                ""fields"": [{
                    ""metadata"": {},
                    ""name"": ""guid"",
                    ""nullable"": false,
                    ""type"": ""string""
                }],
                ""type"": ""struct""
            }";
    structTypeProxy.Setup(m => m.ToJson()).Returns(schemaJson);

    // act
    var dataFrame = sqlContext.CreateDataFrame(rdd, new StructType(structTypeProxy.Object));

    // assert
    Assert.AreEqual(dataFrameProxy, dataFrame.DataFrameProxy);
}
/// <summary>Base constructor: stores the SQL context for derived repositories.</summary>
public RepositoryBase(SqlContext sqlContext)
{
    this.sqlContext = sqlContext;
}
public void TestSqlContextRegisterDataFrameAsTable()
{
    // arrange
    mockSqlContextProxy.Setup(m => m.RegisterDataFrameAsTable(It.IsAny<IDataFrameProxy>(), It.IsAny<string>()));
    var sqlContext = new SqlContext(new SparkContext("", ""), mockSqlContextProxy.Object);
    var dataFrameProxy = new DataFrameIpcProxy(new JvmObjectReference("1"), mockSqlContextProxy.Object);
    var dataFrame = new DataFrame(dataFrameProxy, new SparkContext(new SparkConf()));

    // act
    sqlContext.RegisterDataFrameAsTable(dataFrame, "table");

    // assert: the proxy receives the frame's proxy and the table name
    mockSqlContextProxy.Verify(m => m.RegisterDataFrameAsTable(dataFrameProxy, "table"));
}
/// <summary>Creates the controller with its database context.</summary>
public ChargeController(SqlContext ctx)
{
    _ctx = ctx;
}
// Verifies that SqlContext.RegisterFunction forwards the UDF name, the serialized
// delegate (opaque byte[]) and the return-type name ("string") to the proxy for
// every supported arity, from 0 through 10 input arguments.
public void TestSqlContextRegisterFunction() { mockSqlContextProxy.Setup(m => m.RegisterFunction(It.IsAny<string>(), It.IsAny<byte[]>(), It.IsAny<string>())); var sqlContext = new SqlContext(new SparkContext("", ""), mockSqlContextProxy.Object); sqlContext.RegisterFunction("Func0", () => "Func0"); mockSqlContextProxy.Verify(m => m.RegisterFunction("Func0", It.IsAny<byte[]>(), "string")); sqlContext.RegisterFunction<string, string>("Func1", s => "Func1"); mockSqlContextProxy.Verify(m => m.RegisterFunction("Func1", It.IsAny<byte[]>(), "string")); sqlContext.RegisterFunction<string, string, string>("Func2", (s1, s2) => "Func2"); mockSqlContextProxy.Verify(m => m.RegisterFunction("Func2", It.IsAny<byte[]>(), "string")); sqlContext.RegisterFunction<string, string, string, string>("Func3", (s1, s2, s3) => "Func3"); mockSqlContextProxy.Verify(m => m.RegisterFunction("Func3", It.IsAny<byte[]>(), "string")); sqlContext.RegisterFunction<string, string, string, string, string>("Func4", (s1, s2, s3, s4) => "Func4"); mockSqlContextProxy.Verify(m => m.RegisterFunction("Func4", It.IsAny<byte[]>(), "string")); sqlContext.RegisterFunction<string, string, string, string, string, string>("Func5", (s1, s2, s3, s4, s5) => "Func5"); mockSqlContextProxy.Verify(m => m.RegisterFunction("Func5", It.IsAny<byte[]>(), "string")); sqlContext.RegisterFunction<string, string, string, string, string, string, string>("Func6", (s1, s2, s3, s4, s5, s6) => "Func6"); mockSqlContextProxy.Verify(m => m.RegisterFunction("Func6", It.IsAny<byte[]>(), "string")); sqlContext.RegisterFunction<string, string, string, string, string, string, string, string>("Func7", (s1, s2, s3, s4, s5, s6, s7) => "Func7"); mockSqlContextProxy.Verify(m => m.RegisterFunction("Func7", It.IsAny<byte[]>(), "string")); sqlContext.RegisterFunction<string, string, string, string, string, string, string, string, string>("Func8", (s1, s2, s3, s4, s5, s6, s7, s8) => "Func8"); mockSqlContextProxy.Verify(m => m.RegisterFunction("Func8", It.IsAny<byte[]>(), "string")); sqlContext.RegisterFunction<string, string, string, string, string, string, string, string, string, string>("Func9", (s1, s2, s3, s4, s5, s6, s7, s8, s9) => "Func9"); mockSqlContextProxy.Verify(m => m.RegisterFunction("Func9", It.IsAny<byte[]>(), "string")); sqlContext.RegisterFunction<string, string, string, string, string, string, string, string, string, string, string>("Func10", (s1, s2, s3, s4, s5, s6, s7, s8, s9, s10) => "Func10"); mockSqlContextProxy.Verify(m => m.RegisterFunction("Func10", It.IsAny<byte[]>(), "string")); }
/// <summary>Creates the repository over the given SQL context.</summary>
public OwnerRepository(SqlContext ctx)
{
    _context = ctx;
}
/// <summary>Base constructor: stores the SQL context for derived repositories.</summary>
public RepositoryBase(SqlContext Context)
{
    _context = Context;
}
/// <summary>Creates the view model with its SQL context.</summary>
public InventaireViewModel(SqlContext sqlContext)
{
    ContextSql = sqlContext;
}
/// <summary>Creates the controller with its database context.</summary>
public TripDataController(SqlContext context)
{
    _context = context;
}
/// <summary>Creates the controller with app configuration and its database context.</summary>
public TaskTemplatesController(IConfiguration configuration, SqlContext context)
{
    _configuration = configuration;
    _context = context;
}
// NOTE(review): validates paging args (throws 404 for non-positive pageNumber/pageSize),
// maps the UmbracoEntityTypes value to an object type and, when known, returns a paged,
// culture-mapped list of child entities with a per-item "hasChildren" flag; the name
// filter is applied only when non-blank. Unknown/unsupported types fall through to
// NotSupportedException. Relies on Services.EntityService / SqlContext.Query /
// Mapper, all provided by the enclosing controller.
/// <summary> /// Get paged child entities by id /// </summary> /// <param name="id"></param> /// <param name="type"></param> /// <param name="pageNumber"></param> /// <param name="pageSize"></param> /// <param name="orderBy"></param> /// <param name="orderDirection"></param> /// <param name="filter"></param> /// <returns></returns> public PagedResult <EntityBasic> GetPagedChildren( int id, UmbracoEntityTypes type, int pageNumber, int pageSize, string orderBy = "SortOrder", Direction orderDirection = Direction.Ascending, string filter = "") { if (pageNumber <= 0) { throw new HttpResponseException(HttpStatusCode.NotFound); } if (pageSize <= 0) { throw new HttpResponseException(HttpStatusCode.NotFound); } var objectType = ConvertToObjectType(type); if (objectType.HasValue) { var entities = Services.EntityService.GetPagedChildren(id, objectType.Value, pageNumber - 1, pageSize, out var totalRecords, filter.IsNullOrWhiteSpace() ? null : SqlContext.Query <IUmbracoEntity>().Where(x => x.Name.Contains(filter)), Ordering.By(orderBy, orderDirection)); if (totalRecords == 0) { return(new PagedResult <EntityBasic>(0, 0, 0)); } var culture = ClientCulture(); var pagedResult = new PagedResult <EntityBasic>(totalRecords, pageNumber, pageSize) { Items = entities.Select(source => { var target = Mapper.Map <IEntitySlim, EntityBasic>(source, context => { context.SetCulture(culture); }); target.AdditionalData["hasChildren"] = source.HasChildren; return(target); }) }; return(pagedResult); } //now we need to convert the unknown ones switch (type) { case UmbracoEntityTypes.PropertyType: case UmbracoEntityTypes.PropertyGroup: case UmbracoEntityTypes.Domain: case UmbracoEntityTypes.Language: case UmbracoEntityTypes.User: case UmbracoEntityTypes.Macro: default: throw new NotSupportedException("The " + typeof(EntityController) + " does not currently support data for the type " + type); } }
/// <summary>Returns the cached SqlContext for the samples, creating it on first use.</summary>
private static SqlContext GetSqlContext()
{
    if (sqlContext != null)
    {
        return sqlContext;
    }
    sqlContext = new SqlContext(SparkCLRSamples.SparkContext);
    return sqlContext;
}
// NOTE(review): paged descendants lookup, like GetPagedChildren but recursive.
// Root (id == Constants.System.Root) is special-cased: the query is reduced to the
// current user's content/media start nodes unless ignoreUserStartNodes is set or the
// start nodes include root. Unknown/unsupported entity types throw NotSupportedException.
// NOTE(review): unlike GetPagedChildren, the name filter here is NOT guarded with
// IsNullOrWhiteSpace, so an empty filter still builds a Contains("") query -
// confirm whether that inconsistency is intentional.
public PagedResult <EntityBasic> GetPagedDescendants( int id, UmbracoEntityTypes type, int pageNumber, int pageSize, bool ignoreUserStartNodes, string orderBy = "SortOrder", Direction orderDirection = Direction.Ascending, string filter = "") { if (pageNumber <= 0) { throw new HttpResponseException(HttpStatusCode.NotFound); } if (pageSize <= 0) { throw new HttpResponseException(HttpStatusCode.NotFound); } var objectType = ConvertToObjectType(type); if (objectType.HasValue) { IEnumerable <IUmbracoEntity> entities; long totalRecords; if (id == Constants.System.Root) { // root is special: we reduce it to start nodes int[] aids = null; switch (type) { case UmbracoEntityTypes.Document: aids = Security.CurrentUser.CalculateContentStartNodeIds(Services.EntityService); break; case UmbracoEntityTypes.Media: aids = Security.CurrentUser.CalculateMediaStartNodeIds(Services.EntityService); break; } entities = aids == null || aids.Contains(Constants.System.Root) || ignoreUserStartNodes ? Services.EntityService.GetPagedDescendants(objectType.Value, pageNumber - 1, pageSize, out totalRecords, SqlContext.Query <IUmbracoEntity>().Where(x => x.Name.Contains(filter)), Ordering.By(orderBy, orderDirection), includeTrashed : false) : Services.EntityService.GetPagedDescendants(aids, objectType.Value, pageNumber - 1, pageSize, out totalRecords, SqlContext.Query <IUmbracoEntity>().Where(x => x.Name.Contains(filter)), Ordering.By(orderBy, orderDirection)); } else { entities = Services.EntityService.GetPagedDescendants(id, objectType.Value, pageNumber - 1, pageSize, out totalRecords, SqlContext.Query <IUmbracoEntity>().Where(x => x.Name.Contains(filter)), Ordering.By(orderBy, orderDirection)); } if (totalRecords == 0) { return(new PagedResult <EntityBasic>(0, 0, 0)); } var pagedResult = new PagedResult <EntityBasic>(totalRecords, pageNumber, pageSize) { Items = entities.Select(MapEntities()) }; return(pagedResult); } //now we need to convert the unknown ones switch (type) { case UmbracoEntityTypes.PropertyType: case UmbracoEntityTypes.PropertyGroup: case UmbracoEntityTypes.Domain: case UmbracoEntityTypes.Language: case UmbracoEntityTypes.User: case UmbracoEntityTypes.Macro: default: throw new NotSupportedException("The " + typeof(EntityController) + " does not currently support data for the type " + type); } }
/// <summary>Entry point: boots a Spark-backed C# REPL on the console and runs it until exit.</summary>
static void Main(string[] args)
{
    SparkConf sparkConf = new SparkConf();
    SparkContext sc = new SparkContext(sparkConf);
    SqlContext sqlContext = new SqlContext(sc);

    var scriptEngine = new RoslynScriptEngine(sc, sqlContext);
    var repl = new Repl(scriptEngine, new ConsoleIoHandler());
    repl.Init();
    repl.Run(); // blocks until the user quits the REPL
    scriptEngine.Close();
}
/// <summary>Creates the account repository over the given SQL context.</summary>
public AccountRepository(SqlContext context)
    : base(context)
{
}