We managed to create a working Miracast sink using UWP and we wanted to use this functionality inside a .NET Core application. So we followed this guide to use UWP apis in a .NET Core project:
Using UWP apis with WPF and .NET Core
The project runs and we get a connection from a smartphone to the application, but then we don't receive any video frames from the MediaPlayer object (unlike in the original UWP project, where it works correctly).
We observed that in the MediaSource object we obtain an mcrecv URL (for example: mcrecv://192.168.137.247:7236/h-0000000c/192.168.137.1),
But then the MediaPlayer consuming it doesn't fire any VideoFrameAvailable event.
How can we solve this? Following is the basic implementation we used:
using System;
using System.Diagnostics;
using System.Windows;
using Windows.Graphics.Imaging;
using Windows.Media.Miracast;
using Windows.Media.Playback;
namespace Miracast_GUI
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
/// <summary>
/// Interaction logic for MainWindow.xaml.
/// Hosts a Miracast receiver (sink): listens for transmitters, accepts the
/// connection, and plays the incoming stream through a frame-server-enabled
/// <see cref="MediaPlayer"/> so individual video frames can be consumed.
/// </summary>
public partial class MainWindow : Window
{
    public MiracastReceiver receiver;
    public MiracastReceiverSession session;
    public MediaPlayer mp;
    public SoftwareBitmap frameServerDest;
    public MiracastReceiverConnection connection;

    public MainWindow()
    {
        InitializeComponent();
        // Start listening as soon as the window is constructed.
        StartMiracastService();
    }

    /// <summary>
    /// Configures the Miracast receiver with a friendly name and no
    /// authorization prompt, then starts a session listening for connections.
    /// </summary>
    public void StartMiracastService()
    {
        receiver = new MiracastReceiver();
        receiver.StatusChanged += Receiver_StatusChanged;

        // GetDefaultSettings() already contains the system defaults for
        // ModelName, ModelNumber and RequireAuthorizationFromKnownTransmitters;
        // copying them back onto the same object (as the original code did,
        // with three extra GetDefaultSettings() calls) is a no-op. Only the
        // values we actually want to change are overridden here.
        MiracastReceiverSettings settings = receiver.GetDefaultSettings();
        settings.FriendlyName = "Miracast-Service-Test";
        settings.AuthorizationMethod = MiracastReceiverAuthorizationMethod.None;
        receiver.DisconnectAllAndApplySettings(settings);

        // NOTE(review): passing null instead of a CoreDispatcher means session
        // events are raised on worker threads — marshal to the UI thread
        // before touching any UI from the handlers below.
        session = receiver.CreateSession(/*CoreApplication.MainView*/ null);
        session.AllowConnectionTakeover = true;
        session.ConnectionCreated += Session_ConnectionCreated;
        session.MediaSourceCreated += Session_MediaSourceCreated;
        session.Disconnected += Session_Disconnected;

        MiracastReceiverSessionStartResult result = session.Start();
        Debug.WriteLine("Status: " + result.Status);
    }

    private void Session_Disconnected(MiracastReceiverSession sender, MiracastReceiverDisconnectedEventArgs args)
    {
        // Tear down the player before the session so no frame callbacks fire
        // against a disposed pipeline; the original leaked the MediaPlayer.
        if (mp != null)
        {
            mp.VideoFrameAvailable -= Mp_VideoFrameAvailable;
            mp.Dispose();
            mp = null;
        }
        session.Dispose();
    }

    private void Receiver_StatusChanged(MiracastReceiver sender, object args)
    {
        Debug.WriteLine(receiver.GetStatus().ListeningStatus);
    }

    private void Session_ConnectionCreated(MiracastReceiverSession sender, MiracastReceiverConnectionCreatedEventArgs args)
    {
        connection = args.Connection;
        // Forward keyboard input and map game-controller input to mouse/keyboard.
        connection.InputDevices.Keyboard.TransmitInput = true;
        connection.InputDevices.GameController.Mode =
            MiracastReceiverGameControllerDeviceUsageMode.AsMouseAndKeyboard;
        Debug.WriteLine("CONNECTION CREATED");
    }

    private void Session_MediaSourceCreated(MiracastReceiverSession sender, MiracastReceiverMediaSourceCreatedEventArgs args)
    {
        // IsVideoFrameServerEnabled must be true for VideoFrameAvailable to fire.
        mp = new MediaPlayer
        {
            IsVideoFrameServerEnabled = true,
            RealTimePlayback = true
        };

        // Subscribe BEFORE assigning the source and enabling autoplay: the
        // original attached the handler after Source/AutoPlay were set in the
        // object initializer, so the first frames could arrive while no
        // handler was registered.
        mp.VideoFrameAvailable += Mp_VideoFrameAvailable;
        mp.Source = args.MediaSource;
        mp.AutoPlay = true;

        Debug.WriteLine(mp.PlaybackSession.PlaybackState);
        mp.Play();
        Debug.WriteLine("MEDIA SOURCE CREATED");
    }

    private void Mp_VideoFrameAvailable(MediaPlayer sender, object args)
    {
        // Debug.WriteLine, not Console.WriteLine: a WPF GUI app has no
        // attached console, so the original message was never visible.
        Debug.WriteLine("Received frame...");
    }
}
}