/// <summary>
/// Entry point: consumes messages from the "videos" queue and hands each
/// message to VideoContext.ScaleVideos until the user presses Enter.
/// </summary>
static void Main(string[] args)
{
    var factory = new ConnectionFactory() { HostName = "localhost" };
    using (var connection = factory.CreateConnection())
    using (var channel = connection.CreateModel())
    {
        channel.QueueDeclare(queue: "videos", durable: false, exclusive: false,
                             autoDelete: false, arguments: null);

        var consumer = new EventingBasicConsumer(channel);
        consumer.Received += (model, ea) =>
        {
            // FIX: ea.Body.ToArray() already yields a byte[]; the original
            // called ToArray() on it a second time, copying the buffer twice.
            var body = ea.Body.ToArray();
            var message = Encoding.UTF8.GetString(body);
            Console.WriteLine(" [x] Received {0}", message);
            VideoContext.ScaleVideos(message);
        };

        // NOTE(review): autoAck=true acknowledges before ScaleVideos runs, so a
        // crash mid-processing loses the message — confirm that is acceptable.
        channel.BasicConsume(queue: "videos", autoAck: true, consumer: consumer);

        Console.WriteLine(" Press [enter] to exit.");
        Console.ReadLine();
    }
}
/// <summary>
/// Registers a new user: verifies the password fields match, checks the name
/// is free, stores the user, and redirects to the success page; otherwise the
/// form is redisplayed with validation errors.
/// </summary>
/// <param name="newUser">Posted registration form model.</param>
public ActionResult Register(Register newUser)
{
    if (ModelState.IsValid)
    {
        if (newUser.Password.Trim() != newUser.ConfirmPassword.Trim())
        {
            // BUG FIX: the original added this error but then continued and
            // registered the user anyway; bail out so mismatched passwords
            // never create an account.
            ModelState.AddModelError("ConfirmPassword", "Password fields must be equal");
            return View(newUser);
        }

        // One context for both the uniqueness check and the insert
        // (the original opened two separate contexts back to back).
        using (VideoContext db = new VideoContext())
        {
            User user = db.Users.FirstOrDefault(u => u.Name == newUser.Name);
            if (user == null)
            {
                // NOTE(security): MD5 is not a suitable password hash —
                // migrate to PBKDF2/bcrypt when the schema allows.
                db.Users.Add(new Models.User { Name = newUser.Name, Password = CalculateMD5Hash(newUser.Password) });
                db.SaveChanges();
                return RedirectToAction("RegisterSuccess", "Authentification");
            }
            ModelState.AddModelError("Name", "User with this name exists");
        }
    }
    return View(newUser);
}
/// <summary>
/// Generated sample: starts a video annotation, polls the long-running
/// operation to completion, and shows re-attaching to it by operation name.
/// </summary>
public void AnnotateVideo()
{
    // Snippet: AnnotateVideo(string,IEnumerable<Feature>,VideoContext,string,string,CallSettings)
    // Create client
    VideoIntelligenceServiceClient client = VideoIntelligenceServiceClient.Create();
    // Initialize request argument(s)
    string inputUri = "";
    IEnumerable<Feature> features = new List<Feature>();
    VideoContext videoContext = new VideoContext();
    string outputUri = "";
    string locationId = "";
    // Start the annotation as a long-running operation.
    Operation<AnnotateVideoResponse, AnnotateVideoProgress> response =
        client.AnnotateVideo(inputUri, features, videoContext, outputUri, locationId);
    // Block until the operation finishes.
    Operation<AnnotateVideoResponse, AnnotateVideoProgress> completedResponse =
        response.PollUntilCompleted();
    // The final annotation result.
    AnnotateVideoResponse result = completedResponse.Result;
    // The operation name can be stored and used later to re-attach.
    string operationName = response.Name;
    Operation<AnnotateVideoResponse, AnnotateVideoProgress> retrievedResponse =
        client.PollOnceAnnotateVideo(operationName);
    // Check whether the re-attached operation has completed.
    if (retrievedResponse.IsCompleted)
    {
        // If it has, the result is available immediately.
        AnnotateVideoResponse retrievedResult = retrievedResponse.Result;
    }
    // End snippet
}
/// <summary>
/// Authenticates a user by name and MD5 password hash. On success sets the
/// forms-auth cookie and a session flag, then redirects home; otherwise the
/// form is redisplayed with the appropriate validation error.
/// </summary>
/// <param name="newUser">Posted login form model.</param>
public ActionResult SignIn(Login newUser)
{
    if (ModelState.IsValid)
    {
        // FIX: the original opened two contexts and queried the same user
        // twice (once by name, once by name+hash); fetch once and compare
        // the stored hash in memory.
        User user;
        using (VideoContext db = new VideoContext())
        {
            user = db.Users.FirstOrDefault(u => u.Name == newUser.Name);
        }
        if (user != null)
        {
            string passwordHash = CalculateMD5Hash(newUser.Password);
            if (user.Password == passwordHash)
            {
                FormsAuthentication.SetAuthCookie(newUser.Name, true);
                Session["genreStatus"] = "all";
                return RedirectToAction("Index", "Home");
            }
            ModelState.AddModelError("", $"Name or Password are invalid");
        }
        else
        {
            // NOTE(security): revealing that the name does not exist enables
            // user enumeration — consider a generic message.
            ModelState.AddModelError("Name", $"User with this name isn't exist. Please sign up as a new user.");
        }
    }
    return View(newUser);
}
/// <summary>
/// Creates a new identity account, signs the user in, provisions an empty
/// Profile row for them, and redirects home; on any failure the registration
/// form is redisplayed with the errors.
/// </summary>
/// <param name="model">Posted registration view model.</param>
public async Task<ActionResult> Register(RegisterViewModel model)
{
    if (!ModelState.IsValid)
    {
        // Validation failed — redisplay the form.
        return View(model);
    }

    var user = new ApplicationUser { UserName = model.Email, Email = model.Email };
    var result = await UserManager.CreateAsync(user, model.Password);
    if (result.Succeeded)
    {
        await SignInManager.SignInAsync(user, isPersistent: false, rememberBrowser: false);

        // For enabling account confirmation and password reset, see:
        // http://go.microsoft.com/fwlink/?LinkID=320771
        // (GenerateEmailConfirmationTokenAsync + Url.Action("ConfirmEmail", ...)
        //  + SendEmailAsync with the callback link.)

        // Create the application-side profile linked to the identity user.
        var profile = new Profile();
        profile.UserId = Guid.Parse(user.Id);
        using (var db = new VideoContext())
        {
            db.Profiles.Add(profile);
            db.SaveChanges();
        }
        return RedirectToAction("Index", "Home");
    }

    AddErrors(result);
    // Reaching here means creation failed; redisplay the form.
    return View(model);
}
/// <summary>
/// Returns all videos whose IsProcessed flag is false.
/// The list is materialized before the context is disposed.
/// </summary>
public IEnumerable<Video> GetUnprocessedVideos()
{
    using (var context = new VideoContext())
    {
        return context.Videos
            .Where(v => !v.IsProcessed)
            .ToList();
    }
}
/// <summary>
/// Inserts a new video row and saves immediately.
/// Entity validation is intentionally skipped on save.
/// </summary>
/// <param name="video">Video entity to persist.</param>
public void AddVideo(Video video)
{
    using (var db = new VideoContext())
    {
        db.Configuration.ValidateOnSaveEnabled = false;
        db.Video.Add(video);
        db.SaveChanges();
    }
}
/// <summary>
/// Deletes the video with the given id. A non-existent id is a no-op
/// (the original used First(), which threw InvalidOperationException).
/// </summary>
/// <param name="id">Primary key of the video to delete.</param>
public void DeleteVideo(int id)
{
    using (var videocontext = new VideoContext())
    {
        // FIX: null-safe lookup instead of First(), so deleting an id that
        // was already removed does not throw.
        var entity = videocontext.Video.FirstOrDefault(x => x.VideoID == id);
        if (entity != null)
        {
            videocontext.Video.Remove(entity);
            videocontext.SaveChanges();
        }
    }
}
/// <summary>
/// Returns the (lazily created) video buffer for a request type, creating and
/// caching it on first use.
/// </summary>
/// <param name="requestType">Key identifying which feed to buffer.</param>
/// <param name="timeoutMs">Per-request network timeout.</param>
/// <param name="pageSize">Page size passed through to the video source.</param>
private VideoBuffer GetBuffer(IVideoRequestType requestType, int timeoutMs, int pageSize)
{
    // FIX: single TryGetValue instead of ContainsKey + two indexer reads
    // (the original performed up to three dictionary lookups).
    if (!_ytVideoBuffers.TryGetValue(requestType, out var cached))
    {
        VideoContext context = new VideoContext();
        cached = new VideoBuffer(GetVideosInternal(requestType, timeoutMs, pageSize, context), context);
        _ytVideoBuffers[requestType] = cached;
    }
    return cached as VideoBuffer;
}
// (Removed long-dead commented-out Photo()/UpLoadFile() stubs that were
// parked above this action.)

/// <summary>
/// Shows the paged video list.
/// </summary>
/// <param name="nowPageNum">1-based page number; defaults to the first page.</param>
public ActionResult VideoList(int nowPageNum = 1)
{
    var context = new VideoContext
    {
        NowPage = nowPageNum,
    };
    // Init() populates the page's data before it is handed to the view.
    context.Init();
    return View(model: context);
}
/// <summary>
/// Loads all videos, authors, and genres into the corresponding fields.
/// Proxy creation is disabled so the detached entities are plain POCOs.
/// </summary>
public void LoadData()
{
    using (var db = new VideoContext())
    {
        db.Configuration.ProxyCreationEnabled = false;
        _videos = db.Video.ToList();
        _authors = db.Authors.ToList();
        _genres = db.Genres.ToList();
    }
}
/// <summary>
/// Returns all videos as DTOs, each with a self link that carries the
/// caller's API key through.
/// </summary>
public IEnumerable<VideoDto> Get()
{
    using (var ctx = new VideoContext())
    {
        // FIX: the original returned a lazily-evaluated Select that read
        // HttpContext.Current at enumeration time, after the action (and
        // potentially the request) had completed; capture the key and
        // materialize the projection before returning.
        string apiKey = HttpContext.Current.Request.QueryString["apikey"];
        return ctx.Videos
            .ToList()
            .Select(a => new VideoDto
            {
                Id = a.Id,
                Name = a.Name,
                Link = new Uri(string.Format(@"http://www.riscanet.com/Api/Videos/{0}?apikey={1}", a.Id, apiKey))
            })
            .ToList();
    }
}
// Experimental scratch entry point: probes the D3D11 video-decode interfaces
// on adapter #1. Most of the rendering path is commented out; nothing is
// rendered and no resources are disposed (prototype code).
static void Main()
{
    var renderForm = new RenderForm("");
    SharpDX.DXGI.Factory f = new SharpDX.DXGI.Factory();
    // NOTE(review): adapter index 1 is hard-coded — this throws on
    // single-adapter machines; confirm which GPU is intended.
    SharpDX.DXGI.Adapter a = f.GetAdapter(1);
    // VideoSupport is needed for the VideoDevice/VideoContext queries below.
    SharpDX.Direct3D11.Device d = new Device(a, DeviceCreationFlags.VideoSupport | DeviceCreationFlags.BgraSupport);
    SharpDX.Direct3D11.Device2 d2 = d.QueryInterface<SharpDX.Direct3D11.Device2>();
    //d.Dispose();*/

    /* var device = new RenderDevice(, 1);
     * RenderContext context = new RenderContext(device);
     * var d = device.Device; */

    // Multithread protection must be on before sharing the device with
    // Media Foundation.
    var multithread = d.QueryInterface<SharpDX.Direct3D.DeviceMultithread>();
    multithread.SetMultithreadProtected(true);

    // Create a DXGI Device Manager and bind the device to it.
    var dxgiDeviceManager = new SharpDX.MediaFoundation.DXGIDeviceManager();
    dxgiDeviceManager.ResetDevice(d);

    VideoDevice vd = d.QueryInterface<VideoDevice>();
    //vd.VideoDecoderProfileCount
    /*VideoDecoderDescription desc = new VideoDecoderDescription()
     * {
     * }*/

    VideoContext ctx = d.ImmediateContext.QueryInterface<VideoContext>();

    /*var swapChain = new DX11SwapChain(device, renderForm.Handle);
     *
     * var dx = new VideoDecoderDescription()
     * {
     * }*/

    /*int i = vd.VideoDecoderProfileCount;
     *
     * renderForm.ResizeEnd += (s, e) => swapChain.Resize();
     *
     * RenderLoop.Run(renderForm, () =>
     * {
     *     context.Context.ClearRenderTargetView(swapChain.RenderView, new SharpDX.Color4(1, 1, 1, 1));
     *     swapChain.Present(1, SharpDX.DXGI.PresentFlags.None);
     * });*/
}
/// <summary>
/// Performs asynchronous video annotation. Progress and results can be
/// retrieved through the `google.longrunning.Operations` interface:
/// `Operation.metadata` contains `AnnotateVideoProgress` (progress) and
/// `Operation.response` contains `AnnotateVideoResponse` (results).
/// </summary>
/// <param name="inputUri">
/// Input video location. Currently only
/// [Google Cloud Storage](https://cloud.google.com/storage/) URIs are
/// supported, in the format `gs://bucket-id/object-id` (other URI formats
/// return [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]).
/// See [Request URIs](/storage/docs/reference-uris). `object-id` may contain
/// the wildcards `*` (0 or more chars) and `?` (exactly 1 char) to address
/// multiple videos. Leave unset when the video is embedded in the request as
/// `input_content`; exactly one of the two must be set.
/// </param>
/// <param name="features">
/// Requested video annotation features.
/// </param>
/// <param name="videoContext">
/// Additional video context and/or feature-specific parameters.
/// </param>
/// <param name="outputUri">
/// Optional Google Cloud Storage URI (`gs://bucket-id/object-id`) where the
/// output (in JSON format) should be stored; same URI restrictions as above.
/// </param>
/// <param name="locationId">
/// Optional cloud region where annotation should take place. Supported:
/// `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. When unset, a region
/// is chosen based on the video file location.
/// </param>
/// <param name="cancellationToken">
/// A <see cref="CancellationToken"/> to use for this RPC.
/// </param>
/// <returns>
/// A Task containing the RPC response.
/// </returns>
public virtual Task<Operation<AnnotateVideoResponse, AnnotateVideoProgress>> AnnotateVideoAsync(
    string inputUri,
    IEnumerable<Feature> features,
    VideoContext videoContext,
    string outputUri,
    string locationId,
    CancellationToken cancellationToken) =>
    // Forwards to the CallSettings-based overload, wrapping the token.
    AnnotateVideoAsync(
        inputUri,
        features,
        videoContext,
        outputUri,
        locationId,
        CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Deletes the film with the given id, then redirects to the film index.
/// A missing/unknown id is a no-op (the original called Remove(null) and
/// threw); the context is now disposed deterministically.
/// </summary>
/// <param name="id">Primary key of the film to delete; may be null.</param>
/// <param name="fcNotUsed">Unused form payload (kept for route binding).</param>
public ActionResult Izbrisi(long? id, FormCollection fcNotUsed)
{
    // FIX: the original never disposed the context (no using block).
    using (var context = new VideoContext())
    {
        var film = context.Film.FirstOrDefault(x => x.ID == id);
        if (film != null)
        {
            context.Film.Remove(film);
            context.SaveChanges();
        }
    }
    return RedirectToAction("Index", "Film");
}
/// <summary>
/// Returns all videos not yet processed, fully materialized so the result
/// never outlives the context.
/// </summary>
public IEnumerable<Video> GetUnprocessedVideos()
{
    List<Video> unprocessed;
    using (var db = new VideoContext())
    {
        unprocessed = db.Videos
            .Where(v => !v.IsProcessed)
            .ToList();
    }
    return unprocessed;
}
/// <summary>
/// Copies the mutable fields of <paramref name="video"/> onto the stored
/// row(s) with the same VideoID and saves. Validation is skipped on save.
/// </summary>
/// <param name="video">Detached entity carrying the new field values.</param>
public void UpdateVideo(Video video)
{
    using (var videocontext = new VideoContext())
    {
        videocontext.Configuration.ValidateOnSaveEnabled = false;
        foreach (var entity in videocontext.Video.Where(x => x.VideoID == video.VideoID))
        {
            entity.DatePremiere = video.DatePremiere;
            // FIX: GenreID was assigned twice in the original; once is enough.
            entity.GenreID = video.GenreID;
            entity.Title = video.Title;
        }
        videocontext.SaveChanges();
    }
}
/// <summary>
/// Window Loaded handler: wires the film and genre CollectionViewSources to
/// data fetched via FilmDBManager and subscribes to film-collection changes.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    CollectionViewSource filmViewSource = ((CollectionViewSource)(this.FindResource("filmViewSource")));
    CollectionViewSource genreViewSource = ((CollectionViewSource)(this.FindResource("genreViewSource")));

    // FIX: removed an unused `new VideoContext()` that was never read and
    // never disposed; all data access goes through FilmDBManager below.
    FilmDBManager mgr = new FilmDBManager();
    filmsOb = mgr.GetFilms();
    genresOb = mgr.GetGenres();

    filmViewSource.Source = filmsOb;
    genreViewSource.Source = genresOb;
    filmsOb.CollectionChanged += this.OnCollectionChanged;
}
/// <summary>
/// Performs asynchronous video annotation. Progress and results can be
/// retrieved through the `google.longrunning.Operations` interface:
/// `Operation.metadata` contains `AnnotateVideoProgress` (progress) and
/// `Operation.response` contains `AnnotateVideoResponse` (results).
/// </summary>
/// <param name="inputUri">
/// Input video location. Currently only
/// [Google Cloud Storage](https://cloud.google.com/storage/) URIs are
/// supported, in the format `gs://bucket-id/object-id` (other URI formats
/// return [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]).
/// See [Request URIs](/storage/docs/reference-uris). `object-id` may contain
/// the wildcards `*` (0 or more chars) and `?` (exactly 1 char) to address
/// multiple videos. Leave unset when the video is embedded in the request as
/// `input_content`; exactly one of the two must be set.
/// </param>
/// <param name="features">
/// Requested video annotation features. Required (checked below).
/// </param>
/// <param name="videoContext">
/// Additional video context and/or feature-specific parameters.
/// </param>
/// <param name="outputUri">
/// Optional Google Cloud Storage URI (`gs://bucket-id/object-id`) where the
/// output (in JSON format) should be stored; same URI restrictions as above.
/// </param>
/// <param name="locationId">
/// Optional cloud region where annotation should take place. Supported:
/// `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. When unset, a region
/// is chosen based on the video file location.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public virtual Operation<AnnotateVideoResponse, AnnotateVideoProgress> AnnotateVideo(
    string inputUri,
    IEnumerable<Feature> features,
    VideoContext videoContext,
    string outputUri,
    string locationId,
    CallSettings callSettings = null) => AnnotateVideo(
    // inputUri and features are validated here; the optional fields fall
    // back to empty values.
    new AnnotateVideoRequest
    {
        InputUri = GaxPreconditions.CheckNotNullOrEmpty(inputUri, nameof(inputUri)),
        Features = { GaxPreconditions.CheckNotNull(features, nameof(features)) },
        VideoContext = videoContext, // Optional
        OutputUri = outputUri ?? "", // Optional
        LocationId = locationId ?? "", // Optional
    }, callSettings);
/// <summary>
/// Performs asynchronous video annotation. Progress and results can be
/// retrieved through the `google.longrunning.Operations` interface:
/// `Operation.metadata` contains `AnnotateVideoProgress` (progress) and
/// `Operation.response` contains `AnnotateVideoResponse` (results).
/// </summary>
/// <param name="inputUri">
/// Input video location. Currently only
/// [Google Cloud Storage](https://cloud.google.com/storage/) URIs are
/// supported, in the format `gs://bucket-id/object-id` (other URI formats
/// return [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]).
/// See [Request URIs](/storage/docs/reference-uris). `object-id` may contain
/// the wildcards `*` (0 or more chars) and `?` (exactly 1 char) to address
/// multiple videos. Leave unset when the video is supplied via
/// `input_content`; if set, `input_content` should be unset.
/// </param>
/// <param name="features">
/// Requested video annotation features.
/// </param>
/// <param name="inputContent">
/// The video data bytes. Encoding: base64. If unset, the input video(s)
/// should be specified via `input_uri`. If set, `input_uri` should be unset.
/// </param>
/// <param name="videoContext">
/// Additional video context and/or feature-specific parameters.
/// </param>
/// <param name="outputUri">
/// Optional Google Cloud Storage URI (`gs://bucket-id/object-id`) where the
/// output (in JSON format) should be stored; same URI restrictions as above.
/// </param>
/// <param name="locationId">
/// Optional cloud region where annotation should take place. Supported:
/// `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. When unset, a region
/// is chosen based on the video file location.
/// </param>
/// <param name="callSettings">
/// If not null, applies overrides to this RPC call.
/// </param>
/// <returns>
/// The RPC response.
/// </returns>
public virtual Operation<AnnotateVideoResponse, AnnotateVideoProgress> AnnotateVideo(
    string inputUri,
    IEnumerable<Feature> features,
    ByteString inputContent,
    VideoContext videoContext,
    string outputUri,
    string locationId,
    CallSettings callSettings = null) => AnnotateVideo(
    // Every field is optional in this overload; nulls map to empty values.
    new AnnotateVideoRequest
    {
        InputUri = inputUri ?? "", // Optional
        Features = { features ?? Enumerable.Empty<Feature>() }, // Optional
        InputContent = inputContent ?? ByteString.Empty, // Optional
        VideoContext = videoContext, // Optional
        OutputUri = outputUri ?? "", // Optional
        LocationId = locationId ?? "", // Optional
    }, callSettings);
/// <summary>
/// Returns the ids of all unprocessed videos as a comma-separated string
/// (empty string when there are none).
/// </summary>
public string GetUnprocessedVideosAsCsv()
{
    var videoIds = new List<int>();
    using (var context = new VideoContext())
    {
        // BUG FIX: the original discarded the query result and iterated the
        // still-empty videoIds list, so the method always returned "".
        var videos = (from video in context.Videos
                      where !video.IsProcessed
                      select video).ToList();
        foreach (var v in videos)
        {
            videoIds.Add(v.Id);
        }
        return String.Join(",", videoIds);
    }
}
/// <summary>
/// Logs a user in by matching username and password against the Korisnik
/// table. On success the access level and username go into the session and
/// the user is redirected home; on failure an alert is queued and the login
/// view is redisplayed.
/// </summary>
/// <param name="korisnik">Posted credentials.</param>
public ActionResult Authorize(Da_ne_bude_da_nije_video.Korisnik korisnik)
{
    using (VideoContext _context = new VideoContext())
    {
        // NOTE(security): credentials are compared in plain text here, which
        // implies passwords are stored unhashed — worth confirming and fixing.
        var match = _context.Korisnik
            .FirstOrDefault(x => x.KorisnickoIme == korisnik.KorisnickoIme && x.Lozinka == korisnik.Lozinka);

        if (match == null)
        {
            TempData["msg"] = "<script>alert('Pogresna lozinka ili nepostojece korisnicko ime');</script>";
            return View("Index", korisnik);
        }

        Session["PravoPristupa"] = match.PravoPristupa;
        Session["KorisnickoIme"] = match.KorisnickoIme;
        return RedirectToAction("Index", "Home");
    }
}
/// <summary>
/// Streams the video with the given id over HTTP. The response content pushes
/// the file via <c>MediaStreaming.WriteToStream</c> with a content type
/// derived from the file name; an unknown id yields a content-less response.
/// </summary>
/// <param name="id">Primary key of the video row to stream.</param>
public HttpResponseMessage Get(int id)
{
    using (var db = new VideoContext())
    {
        var response = Request.CreateResponse();
        // Look up the stored file path for this video.
        var video = (from a in db.Videos where a.Id == id select a).FirstOrDefault();
        if (video != null)
        {
            // NOTE(review): the NetworkConnection is disposed when this block
            // exits, but PushStreamContent invokes WriteToStream later, when
            // the response body is actually sent — confirm the network share
            // remains accessible at that point.
            using (new NetworkConnection())
            {
                var fullPath = video.Path;
                var mediaStreaming = new MediaStreaming(fullPath);
                response.Content = new PushStreamContent(mediaStreaming.WriteToStream,
                    new MediaTypeHeaderValue(MediaContentHelper.GetContentTypeFromFileName(video.Path)));
            }
        }
        return response;
    }
}
/// <summary>
/// Returns the ids of all unprocessed videos as a comma-separated string
/// (empty string when there are none).
/// </summary>
public string GetUnprocessedVideoAsCsv()
{
    // FIX: `new VideoContext()` can never be null, so the old null check —
    // and its unreachable fallback returning List<int>.ToString() (the type
    // name, not a CSV) — was dead code. The context is now also disposed.
    using (var context = new VideoContext())
    {
        var videoIds = (from video in context.Videos
                        where !video.IsProcessed
                        select video.Id).ToList();
        return String.Join(",", videoIds);
    }
}
/// <summary>
/// libVLC video-format callback: records the source resolution, computes a
/// thumbnail size capped at 200px on the longer side, (re)creates the pixel
/// buffer context, and requests RV32 output. Returns the buffer size.
/// </summary>
uint VideoFormatCallback(ref IntPtr opaque, ref uint chroma, ref uint width, ref uint height, ref uint pitches, ref uint lines)
{
    if (mediaPlayer.Position < position)
    {
        mediaPlayer.Position = position;
    }
    Console.WriteLine(String.Format("Initialize Video Content : {0}x{1}, Chroma: {2:x}", width, height, chroma));
    uint thumbnailMax = 200, thumbnailHeight, thumbnailWidth;
    _VideoResolution.Width = Convert.ToString(width);
    _VideoResolution.Height = Convert.ToString(height);
    if (height > width)
    {
        // Portrait: clamp the height, scale the width down proportionally.
        // BUG FIX: the original used height/width here, producing a thumbnail
        // width LARGER than thumbnailMax for portrait video.
        thumbnailHeight = thumbnailMax;
        thumbnailWidth = (uint)((double)width / (double)height * (double)thumbnailMax);
    }
    else
    {
        // Landscape (or square): clamp the width, scale the height.
        thumbnailWidth = thumbnailMax;
        thumbnailHeight = (uint)((double)height / (double)width * (double)thumbnailMax);
    }
    // NOTE(review): Context stores the thumbnail dimensions but is compared
    // against the full frame width/height here, so it is recreated on every
    // call for any frame larger than the thumbnail — confirm intent.
    if (Context == null || Context.Width != width || Context.Height != height)
    {
        Context = new VideoContext(thumbnailWidth, thumbnailHeight);
    }
    Context.IsAspectRatioChecked = false;
    // Request RV32 (32-bit RGB) output from libVLC ("RV32" FourCC).
    chroma = ('R' << 00) | ('V' << 08) | ('3' << 16) | ('2' << 24);
    width = (uint)Context.Width;
    height = (uint)Context.Height;
    pitches = (uint)Context.Stride;
    lines = (uint)Context.Height;
    return (uint)Context.Size;
}
/// <summary>
/// Creates a reader over an existing <see cref="VideoContext"/>.
/// Nothing here disposes the context — presumably the caller owns its
/// lifetime (confirm against the call sites).
/// </summary>
/// <param name="db">Database context to read from.</param>
public DbReader(VideoContext db)
{
    _db = db;
}
/// <summary>
/// Constructor: stores the supplied database context (likely injected by the
/// framework's DI container — confirm registration).
/// </summary>
/// <param name="context">Database context used by this controller's actions.</param>
public TagController(VideoContext context)
{
    _context = context;
}
/// <summary>
/// Constructor: forwards the database context and mapper straight to the
/// base service; no additional state is kept here.
/// </summary>
/// <param name="videoContext">Database context for video data.</param>
/// <param name="mapper">Object mapper passed through to the base class.</param>
public VideoService(VideoContext videoContext, IMapper mapper) : base(videoContext, mapper)
{
}
/// <summary>
/// Constructor: creates the controller's own <see cref="VideoContext"/>.
/// NOTE(review): no Dispose override is visible in this chunk — confirm the
/// context is released when the controller is disposed.
/// </summary>
public FILMOVIController()
{
    _context = new VideoContext();
}
// Builds the full D3D11 + Direct2D/DirectWrite rendering pipeline for the
// hooked control: swap chain, shaders, full-screen quad, sampler, RGB frame
// texture, and — when the hardware exposes it — the D3D11 video processor.
// Statement order matters: the swap chain must exist before the back buffer
// and 2D render target are derived from it, and shaders before IA bindings.
private void Initialize()
{
    factory2d = new Factory2D(SharpDX.Direct2D1.FactoryType.MultiThreaded, DebugLevel.Information);
    HookControl = Control.FromHandle(HookHandle);
    HookControl.Resize += HookResized;

    var desc = new SwapChainDescription()
    {
        BufferCount = 1,
        // BGRA | Required for Direct2D/DirectWrite (<Win8)
        ModeDescription = new ModeDescription(0, 0, new Rational(0, 0), Format.B8G8R8A8_UNorm),
        IsWindowed = true,
        OutputHandle = HookHandle,
        SampleDescription = new SampleDescription(1, 0),
        SwapEffect = SwapEffect.Discard,
        Usage = Usage.RenderTargetOutput
    };

    /* [Enable Debug Layer]
     * https://docs.microsoft.com/en-us/windows/win32/direct3d11/overviews-direct3d-11-devices-layers
     * https://docs.microsoft.com/en-us/windows/win32/direct3d11/using-the-debug-layer-to-test-apps
     *
     * Win7 (with Platform Update KB2670838) / Win8.x: install the Windows 8.x
     * SDK to get D3D11_1SDKLayers.dll.
     * Win10: enable the "Graphics Tools" optional feature
     * (Settings > System > Apps & features > Manage optional features).
     */

    // Enable on-demand to avoid "Failed to create device issue"
    //#if DEBUG
    // Device.CreateWithSwapChain(SharpDX.Direct3D.DriverType.Hardware, DeviceCreationFlags.Debug | DeviceCreationFlags.BgraSupport, desc, out device, out swapChain);
    //#else
    Device.CreateWithSwapChain(SharpDX.Direct3D.DriverType.Hardware, DeviceCreationFlags.BgraSupport, desc, out device, out swapChain);
    //#endif

    // Stop DXGI monitoring the window (e.g. default Alt+Enter handling).
    var factory = swapChain.GetParent<FactoryDX>();
    factory.MakeWindowAssociation(HookHandle, WindowAssociationFlags.IgnoreAll);

    backBuffer = Texture2D.FromSwapChain<Texture2D>(swapChain, 0);
    rtv = new RenderTargetView(device, backBuffer);
    context = device.ImmediateContext;

    // Direct2D/DirectWrite layered over the same back buffer surface —
    // brushes presumably used for text/outline overlay drawing.
    factoryWrite = new FactoryDW();
    surface = backBuffer.QueryInterface<Surface>();
    rtv2d = new RenderTarget(factory2d, surface, new RenderTargetProperties(new PixelFormat(Format.Unknown, SharpDX.Direct2D1.AlphaMode.Premultiplied)));
    brush2d = new SolidColorBrush(rtv2d, Color.White);
    brush2dOutline = new SolidColorBrush(rtv2d, Color.Gray);
    outlineRenderer.renderer = this;

    // Pick shader profiles compatible with the device's feature level.
    string vertexProfile = "vs_5_0";
    string pixelProfile = "ps_5_0";
    if (device.FeatureLevel == SharpDX.Direct3D.FeatureLevel.Level_9_1 ||
        device.FeatureLevel == SharpDX.Direct3D.FeatureLevel.Level_9_2 ||
        device.FeatureLevel == SharpDX.Direct3D.FeatureLevel.Level_9_3 ||
        device.FeatureLevel == SharpDX.Direct3D.FeatureLevel.Level_10_0 ||
        device.FeatureLevel == SharpDX.Direct3D.FeatureLevel.Level_10_1)
    {
        vertexProfile = "vs_4_0_level_9_1";
        pixelProfile = "ps_4_0_level_9_1";
    }

    // Compile the embedded shaders: one vertex shader plus RGB and YUV
    // pixel-shader variants.
    var VertexShaderByteCode = ShaderBytecode.Compile(Properties.Resources.VertexShader, "main", vertexProfile, ShaderFlags.Debug);
    vertexLayout = new InputLayout(device, VertexShaderByteCode, new[]
    {
        new InputElement("POSITION", 0, Format.R32G32B32_Float, 0, 0, InputClassification.PerVertexData, 0),
        new InputElement("TEXCOORD", 0, Format.R32G32_Float, 12, 0, InputClassification.PerVertexData, 0),
    });
    vertexShader = new VertexShader(device, VertexShaderByteCode);
    var PixelShaderByteCode = ShaderBytecode.Compile(Properties.Resources.PixelShader, "main", pixelProfile, ShaderFlags.Debug);
    pixelShader = new PixelShader(device, PixelShaderByteCode);
    var PixelShaderByteCodeYUV = ShaderBytecode.Compile(Properties.Resources.PixelShader_YUV, "main", pixelProfile, ShaderFlags.Debug);
    pixelShaderYUV = new PixelShader(device, PixelShaderByteCodeYUV);

    // Full-screen quad: two triangles, 5 floats per vertex (x, y, z, u, v).
    vertexBuffer = Buffer.Create(device, BindFlags.VertexBuffer, new[]
    {
        -1.0f, -1.0f, 0, 0.0f, 1.0f,
        -1.0f, 1.0f, 0, 0.0f, 0.0f,
        1.0f, -1.0f, 0, 1.0f, 1.0f,
        1.0f, -1.0f, 0, 1.0f, 1.0f,
        -1.0f, 1.0f, 0, 0.0f, 0.0f,
        1.0f, 1.0f, 0, 1.0f, 0.0f
    });

    SamplerState textureSampler = new SamplerState(device, new SamplerStateDescription()
    {
        AddressU = TextureAddressMode.Clamp,
        AddressV = TextureAddressMode.Clamp,
        AddressW = TextureAddressMode.Clamp,
        ComparisonFunction = Comparison.Never,
        Filter = Filter.MinMagMipLinear,
        MaximumAnisotropy = 1,
        MaximumLod = float.MaxValue,
        MinimumLod = 0,
        MipLodBias = 0.0f
    });

    // Fixed pipeline state: the one quad, linearly sampled.
    context.InputAssembler.InputLayout = vertexLayout;
    context.InputAssembler.PrimitiveTopology = SharpDX.Direct3D.PrimitiveTopology.TriangleList;
    context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertexBuffer, Utilities.SizeOf<float>() * 5, 0));
    context.VertexShader.Set(vertexShader);
    context.PixelShader.SetSampler(0, textureSampler);

    // RGB frame texture sized to the hooked control.
    textureRGB = new Texture2D(device, new Texture2DDescription()
    {
        Usage = ResourceUsage.Default,
        Format = Format.R8G8B8A8_UNorm,
        Width = HookControl.Width,
        Height = HookControl.Height,
        BindFlags = BindFlags.ShaderResource | BindFlags.RenderTarget,
        CpuAccessFlags = CpuAccessFlags.None,
        OptionFlags = ResourceOptionFlags.None,
        SampleDescription = new SampleDescription(1, 0),
        ArraySize = 1,
        MipLevels = 1
    });

    // SRV template for single-channel (R8) textures used by the YUV path.
    srvDescYUV = new ShaderResourceViewDescription();
    srvDescYUV.Dimension = SharpDX.Direct3D.ShaderResourceViewDimension.Texture2D;
    srvDescYUV.Format = Format.R8_UNorm;
    srvDescYUV.Texture2D.MostDetailedMip = 0;
    srvDescYUV.Texture2D.MipLevels = 1;

    // Falling back from videoDevice1/videoContext1 to videoDevice/videoContext
    // to ensure backwards compatibility.
    videoDevice1 = device.QueryInterfaceOrNull<VideoDevice>();
    videoContext1 = device.ImmediateContext.QueryInterfaceOrNull<VideoContext>();
    if (videoDevice1 == null || videoContext1 == null)
    {
        // No hardware video interface — bail out after setting the viewport.
        SetViewport();
        return;
    }

    // Minimal 1x1 content description; the 1:1 frame rates and sizes look
    // like placeholders updated per-stream elsewhere — confirm.
    vpcd = new VideoProcessorContentDescription()
    {
        Usage = VideoUsage.PlaybackNormal,
        InputFrameFormat = VideoFrameFormat.Progressive,
        InputFrameRate = new Rational(1, 1),
        OutputFrameRate = new Rational(1, 1),
        InputWidth = 1,
        OutputWidth = 1,
        InputHeight = 1,
        OutputHeight = 1
    };
    videoDevice1.CreateVideoProcessorEnumerator(ref vpcd, out vpe);
    videoDevice1.CreateVideoProcessor(vpe, 0, out videoProcessor);
    vpivd = new VideoProcessorInputViewDescription()
    {
        FourCC = 0,
        Dimension = VpivDimension.Texture2D,
        Texture2D = new Texture2DVpiv() { MipSlice = 0, ArraySlice = 0 }
    };
    vpovd = new VideoProcessorOutputViewDescription() { Dimension = VpovDimension.Texture2D };
    vpsa = new VideoProcessorStream[1];
    SetViewport();
}
/// <summary>
/// Returns the (lazily created) video buffer for a request type, creating and
/// caching it on first use.
/// </summary>
/// <param name="requestType">Key identifying which feed to buffer.</param>
/// <param name="timeoutMs">Per-request network timeout.</param>
/// <param name="pageSize">Page size passed through to the video source.</param>
private VideoBuffer GetBuffer(IVideoRequestType requestType, int timeoutMs, int pageSize)
{
    // FIX: single TryGetValue instead of ContainsKey + two indexer reads
    // (the original performed up to three dictionary lookups).
    if (!_ytVideoBuffers.TryGetValue(requestType, out var cached))
    {
        VideoContext context = new VideoContext();
        cached = new VideoBuffer(GetVideosInternal(requestType, timeoutMs, pageSize, context), context);
        _ytVideoBuffers[requestType] = cached;
    }
    return cached as VideoBuffer;
}
/// <summary>
/// Lazily pages through the YouTube GData feed for the given request type,
/// yielding one video at a time. Updates <paramref name="context"/> with the
/// feed's total count each page, and marks it Full when an empty page ends
/// the stream.
/// </summary>
private IEnumerator<IVideo> GetVideosInternal(IVideoRequestType requestType, int timeoutMs, int maxPerPage, VideoContext context)
{
    // Resolve the feed URL for this request type.
    string baseUrl;
    if (requestType is MyFavoritesRequestType)
    {
        baseUrl = String.Format(
            CultureInfo.InvariantCulture,
            "http://gdata.youtube.com/feeds/api/users/{0}/favorites",
            _videoAuth.Username);
    }
    else if (requestType is MyVideosRequestType)
    {
        baseUrl = String.Format(
            CultureInfo.InvariantCulture,
            "http://gdata.youtube.com/feeds/api/users/{0}/uploads",
            _videoAuth.Username);
    }
    else
    {
        throw new Exception("Unknown request type.");
    }

    // Fetch pages until the feed runs dry.
    for (int page = 1; ; page++)
    {
        string requestUrl = baseUrl + string.Format(
            CultureInfo.InvariantCulture,
            "?max-results={0}&start-index={1}",
            maxPerPage,
            ((page - 1) * maxPerPage + 1));

        // Download this page of the feed.
        Stream feedStream = CallYouTubeApi(requestUrl, timeoutMs);

        // Parse it into a list of videos and the feed's total count.
        int totalResults;
        YouTubeVideo[] pageVideos = ParseVideoList(feedStream, out totalResults);
        context.Available = totalResults;

        if (pageVideos.Length == 0)
        {
            // Empty page: the enumeration is exhausted.
            context.Full = true;
            yield break;
        }

        foreach (YouTubeVideo video in pageVideos)
        {
            yield return video;
        }
    }
}
/// <summary>
/// Wraps a lazily-evaluated video enumerator together with the context that
/// tracks the feed's paging state (Available/Full).
/// </summary>
/// <param name="videoSource">Enumerator producing videos on demand.</param>
/// <param name="context">Shared state updated as the source is consumed.</param>
public VideoBuffer(IEnumerator<IVideo> videoSource, VideoContext context)
{
    _videoSource = videoSource;
    _context = context;
}