Google Glass live stream video using Wowza RTSP server - google-glass

I'm trying to build a Google Glass app that supports live streaming. I'm aware that the Livestream app can do this, but I don't think we can integrate it into our own application, or am I wrong? Is there a way to integrate live streaming in our app?
I came across the open-source project https://github.com/andermaco/GlassStream, which does the same thing using the Wowza RTSP server. As per the instructions I set the user name/password and updated the URL, but there is an issue while running the application; I tried to debug it but was not successful. This is the log I am getting repeatedly:
java.lang.IllegalStateException at android.media.MediaCodec.dequeueOutputBuffer(Native Method)
at net.majorkernelpanic.streaming.rtp.MediaCodecInputStream.read(MediaCodecInputStream.java:75)
at net.majorkernelpanic.streaming.rtp.AACLATMPacketizer.run(AACLATMPacketizer.java:88)
at java.lang.Thread.run(Thread.java:841)
Some users have done this successfully. Please share your source code or let me know if I am missing something in setting up the server. Any other resources for implementing this would also be great.
Thanks in advance.

This is the code I've used to get it working on Google Glass (XE22) using Wowza Media Server and libstreaming.
I have two classes, AppConfig and MyActivity.
AppConfig:
package com.example.GlassApp;
/**
* User: Colin Shewell
* Date: 21/08/14
* Time: 15:30
*/
public class AppConfig {
public static final String STREAM_URL = "rtsp://193.61.148.73:1935/serg/android_test";
//public static final String STREAM_URL = "rtsp://192.168.2.2:1935/serg/android_test";
public static final String PUBLISHER_USERNAME = "";
public static final String PUBLISHER_PASSWORD = "";
}
MyActivity:
package com.example.GlassApp;
/**
* User: Colin Shewell
* Date: 21/08/14
* Time: 15:30
*/
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.majorkernelpanic.streaming.Session;
import net.majorkernelpanic.streaming.SessionBuilder;
import net.majorkernelpanic.streaming.audio.AudioQuality;
import net.majorkernelpanic.streaming.gl.SurfaceView;
import net.majorkernelpanic.streaming.rtsp.RtspClient;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.Window;
import android.view.WindowManager;
import net.majorkernelpanic.streaming.video.VideoQuality;
public class MyActivity extends Activity implements RtspClient.Callback, Session.Callback, SurfaceHolder.Callback {
// log tag
public final static String TAG = MyActivity.class.getSimpleName();
// surfaceview
private static SurfaceView mSurfaceView;
// Rtsp session
private Session mSession;
private static RtspClient mClient;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
// getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.activity_main);
mSurfaceView = (SurfaceView) findViewById(R.id.surface);
mSurfaceView.getHolder().addCallback(this);
// Initialize RTSP client
initRtspClient();
}
@Override
protected void onResume() {
super.onResume();
toggleStreaming();
}
@Override
protected void onPause(){
super.onPause();
toggleStreaming();
}
private void initRtspClient() {
// Configures the SessionBuilder
mSession = SessionBuilder.getInstance()
.setContext(getApplicationContext())
.setAudioEncoder(SessionBuilder.AUDIO_NONE)
.setVideoEncoder(SessionBuilder.VIDEO_H264)
.setVideoQuality(new VideoQuality(640, 480, 20, 500000)) // only needed if you want to change the resolution from the default
.setSurfaceView(mSurfaceView).setPreviewOrientation(0)
.setCallback(this).build();
// Configures the RTSP client
mClient = new RtspClient();
mClient.setSession(mSession);
mClient.setCallback(this);
mSurfaceView.setAspectRatioMode(SurfaceView.ASPECT_RATIO_PREVIEW);
String ip, port, path;
// Parse the URI defined in AppConfig
Pattern uri = Pattern.compile("rtsp://(.+):(\\d+)/(.+)");
Matcher m = uri.matcher(AppConfig.STREAM_URL);
if (!m.find()) {
throw new IllegalStateException("Invalid RTSP URL: " + AppConfig.STREAM_URL);
}
ip = m.group(1);
port = m.group(2);
path = m.group(3);
mClient.setCredentials(AppConfig.PUBLISHER_USERNAME,
AppConfig.PUBLISHER_PASSWORD);
mClient.setServerAddress(ip, Integer.parseInt(port));
mClient.setStreamPath("/" + path);
}
private void toggleStreaming() {
if (!mClient.isStreaming()) {
// Start camera preview
mSession.startPreview();
// Start video stream
mClient.startStream();
} else {
// already streaming, stop streaming
// stop camera preview
mSession.stopPreview();
// stop streaming
mClient.stopStream();
}
}
@Override
public void onDestroy() {
super.onDestroy();
mClient.release();
mSession.release();
mSurfaceView.getHolder().removeCallback(this);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
public void onSessionError(int reason, int streamType, Exception e) {
switch (reason) {
case Session.ERROR_CAMERA_ALREADY_IN_USE:
break;
case Session.ERROR_CAMERA_HAS_NO_FLASH:
break;
case Session.ERROR_INVALID_SURFACE:
break;
case Session.ERROR_STORAGE_NOT_READY:
break;
case Session.ERROR_CONFIGURATION_NOT_SUPPORTED:
break;
case Session.ERROR_OTHER:
break;
}
if (e != null) {
alertError(e.getMessage());
e.printStackTrace();
}
}
private void alertError(final String msg) {
final String error = (msg == null) ? "Unknown error" : msg;
AlertDialog.Builder builder = new AlertDialog.Builder(MyActivity.this);
builder.setMessage(error).setPositiveButton("Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
}
});
AlertDialog dialog = builder.create();
dialog.show();
}
@Override
public void onRtspUpdate(int message, Exception exception) {
switch (message) {
case RtspClient.ERROR_CONNECTION_FAILED:
case RtspClient.ERROR_WRONG_CREDENTIALS:
alertError(exception.getMessage());
exception.printStackTrace();
break;
}
}
@Override
public void onPreviewStarted() {
}
@Override
public void onSessionConfigured() {
}
@Override
public void onSessionStarted() {
}
@Override
public void onSessionStopped() {
}
@Override
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
}
// @Override
public void onBitrateUpdate(long bitrate) {
}
}
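One note on this configuration: it disables audio entirely (AUDIO_NONE), which also sidesteps the IllegalStateException in the question's log, since that crash originates in libstreaming's AAC audio packetizer. If audio is needed, a possible variant (a sketch, untested on Glass) is to enable AAC with an explicit quality:
// Sketch: the same SessionBuilder setup with AAC audio enabled instead of AUDIO_NONE.
// Untested on Glass; the question's crash came from the AAC path, so this may
// still need tuning (and the RECORD_AUDIO permission in the manifest).
mSession = SessionBuilder.getInstance()
.setContext(getApplicationContext())
.setAudioEncoder(SessionBuilder.AUDIO_AAC)
.setAudioQuality(new AudioQuality(16000, 32000)) // 16 kHz sampling, 32 kbps
.setVideoEncoder(SessionBuilder.VIDEO_H264)
.setVideoQuality(new VideoQuality(640, 480, 20, 500000))
.setSurfaceView(mSurfaceView).setPreviewOrientation(0)
.setCallback(this).build();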
EDIT:
I can confirm that the following video quality settings work:
.setVideoQuality(new VideoQuality(640, 480, 20, 500000))
.setVideoQuality(new VideoQuality(960, 720, 20, 500000))
I'd also like to add that an FPS value above 20 seems to result in the app failing to start.


How to insert data after clearing cache in Cloud Memorystore using Google Cloud Dataflow?

I am working on a task to clear the Memorystore cache if the input file to be processed by Dataflow has data. That is, if the input file has no records, the Memorystore won't be flushed; but if the input file has even one record, the Memorystore should be flushed and then the input file should be processed.
My Dataflow application is a multi-pipeline application which reads, processes, and then stores the data in Memorystore. The pipeline executes successfully. The flushing of the Memorystore works, but after flushing, the insertion does not happen.
I have written a function that flushes the Memorystore after checking if the input file has a record.
FlushingMemorystore.java
package com.click.example.functions;
import afu.org.checkerframework.checker.nullness.qual.Nullable;
import com.google.auto.value.AutoValue;
import org.apache.beam.sdk.io.redis.RedisConnectionConfiguration;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PDone;
import org.apache.beam.vendor.grpc.v1p26p0.com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.Pipeline;
public class FlushingMemorystore {
private static final Logger LOGGER = LoggerFactory.getLogger(FlushingMemorystore.class);
public static FlushingMemorystore.Read read() {
return (new AutoValue_FlushingMemorystore_Read.Builder())
.setConnectionConfiguration(RedisConnectionConfiguration.create()).build();
}
@AutoValue
public abstract static class Read extends PTransform<PCollection<Long>, PDone> {
public Read() {
}
@Nullable
abstract RedisConnectionConfiguration connectionConfiguration();
@Nullable
abstract Long expireTime();
abstract FlushingMemorystore.Read.Builder toBuilder();
public FlushingMemorystore.Read withEndpoint(String host, int port) {
Preconditions.checkArgument(host != null, "host cannot be null");
Preconditions.checkArgument(port > 0, "port cannot be negative or 0");
return this.toBuilder().setConnectionConfiguration(this.connectionConfiguration().withHost(host).withPort(port)).build();
}
public FlushingMemorystore.Read withAuth(String auth) {
Preconditions.checkArgument(auth != null, "auth cannot be null");
return this.toBuilder().setConnectionConfiguration(this.connectionConfiguration().withAuth(auth)).build();
}
public FlushingMemorystore.Read withTimeout(int timeout) {
Preconditions.checkArgument(timeout >= 0, "timeout cannot be negative");
return this.toBuilder().setConnectionConfiguration(this.connectionConfiguration().withTimeout(timeout)).build();
}
public FlushingMemorystore.Read withConnectionConfiguration(RedisConnectionConfiguration connectionConfiguration) {
Preconditions.checkArgument(connectionConfiguration != null, "connection cannot be null");
return this.toBuilder().setConnectionConfiguration(connectionConfiguration).build();
}
public FlushingMemorystore.Read withExpireTime(Long expireTimeMillis) {
Preconditions.checkArgument(expireTimeMillis != null, "expireTimeMillis cannot be null");
Preconditions.checkArgument(expireTimeMillis > 0L, "expireTimeMillis cannot be negative or 0");
return this.toBuilder().setExpireTime(expireTimeMillis).build();
}
public PDone expand(PCollection<Long> input) {
Preconditions.checkArgument(this.connectionConfiguration() != null, "withConnectionConfiguration() is required");
input.apply(ParDo.of(new FlushingMemorystore.Read.ReadFn(this)));
return PDone.in(input.getPipeline());
}
private static class ReadFn extends DoFn<Long, String> {
private static final int DEFAULT_BATCH_SIZE = 1000;
private final FlushingMemorystore.Read spec;
private transient Jedis jedis;
private transient Pipeline pipeline;
private int batchCount;
public ReadFn(FlushingMemorystore.Read spec) {
this.spec = spec;
}
@Setup
public void setup() {
this.jedis = this.spec.connectionConfiguration().connect();
}
@StartBundle
public void startBundle() {
this.pipeline = this.jedis.pipelined();
this.pipeline.multi();
this.batchCount = 0;
}
@ProcessElement
public void processElement(DoFn<Long, String>.ProcessContext c) {
Long count = c.element();
batchCount++;
if(count==null && count < 0) {
LOGGER.info("No Records are there in the input file");
} else {
if (pipeline.isInMulti()) {
pipeline.exec();
pipeline.sync();
jedis.flushDB();
}
LOGGER.info("*****The memorystore is flushed*****");
}
}
@FinishBundle
public void finishBundle() {
if (this.pipeline.isInMulti()) {
this.pipeline.exec();
this.pipeline.sync();
}
this.batchCount=0;
}
@Teardown
public void teardown() {
this.jedis.close();
}
}
@AutoValue.Builder
abstract static class Builder {
Builder() {
}
abstract FlushingMemorystore.Read.Builder setExpireTime(Long expireTimeMillis);
abstract FlushingMemorystore.Read build();
abstract FlushingMemorystore.Read.Builder setConnectionConfiguration(RedisConnectionConfiguration connectionConfiguration);
}
}
}
I am using the function in my Starter Pipeline code.
Code snippet of starter pipeline where the function is being used:
StorageToRedisOptions options = PipelineOptionsFactory.fromArgs(args)
.withValidation()
.as(StorageToRedisOptions.class);
Pipeline p = Pipeline.create(options);
PCollection<String> lines = p.apply(
"ReadLines", TextIO.read().from(options.getInputFile()));
/**
* Flushing the Memorystore if there are records in the input file
*/
lines.apply("Checking Data in input file", Count.globally())
.apply("Flushing the data store", FlushingMemorystore.read()
.withConnectionConfiguration(RedisConnectionConfiguration
.create(options.getRedisHost(), options.getRedisPort())));
Code snippet for the processed data to be inserted after clearing the cache:
dataset.apply(SOME_DATASET_TRANSFORMATION, RedisIO.write()
.withMethod(RedisIO.Write.Method.SADD)
.withConnectionConfiguration(RedisConnectionConfiguration
.create(options.getRedisHost(), options.getRedisPort())));
The Dataflow job executes fine and it flushes the Memorystore as well, but the insertion does not work after that. Could you please point out where I am going wrong?
Any solution for resolving the issue is truly appreciated. Thanks in advance!
Edit:
Providing additional information as requested in the comments
The runtime is Java 11, and it uses the Apache Beam SDK 2.24.0.
If the input file has records, it will process the data with some logic. For example, if the input file has data like:
abcabc|Bruce|Wayne|2000
abbabb|Tony|Stark|3423
Dataflow will count the number of records, which is 2 in this case, and will process the ID, first name, etc. according to the logic, and then store them in Memorystore. This input file arrives every day; hence the Memorystore should be cleared (or flushed) whenever the input file has records.
Although the pipeline is not breaking, I think I am missing something.
I suspect the problem here is that you need to ensure the "Flush" step runs (and completes) before the RedisIO.write step happens. Beam has a Wait.on transform that you can use for this.
To accomplish this, we can use the output from the flushing PTransform as a signal that we've flushed the database - and we only write to the database after we are done flushing. The process call for your flushing DoFn would look like this:
@ProcessElement
public void processElement(DoFn<Long, String>.ProcessContext c) {
Long count = c.element();
if (count == null || count <= 0) {
LOGGER.info("No Records are there in the input file");
} else {
if (pipeline.isInMulti()) {
pipeline.exec();
pipeline.sync();
jedis.flushDB();
}
LOGGER.info("*****The memorystore is flushed*****");
}
c.output("READY");
}
Once we have a signal pointing that the database has been flushed, we can use it to wait before writing the new data to it:
Pipeline p = Pipeline.create(options);
PCollection<String> lines = p.apply(
"ReadLines", TextIO.read().from(options.getInputFile()));
/**
* Flushing the Memorystore if there are records in the input file
*/
PCollection<String> flushedSignal = lines
.apply("Checking Data in input file", Count.globally())
.apply("Flushing the data store", FlushingMemorystore.read()
.withConnectionConfiguration(RedisConnectionConfiguration
.create(options.getRedisHost(), options.getRedisPort())));
// Then we use the flushing signal to start writing to Redis:
dataset
.apply(Wait.on(flushedSignal))
.apply(SOME_DATASET_TRANSFORMATION, RedisIO.write()
.withMethod(RedisIO.Write.Method.SADD)
.withConnectionConfiguration(RedisConnectionConfiguration
.create(options.getRedisHost(), options.getRedisPort())));
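Wait.on holds each window of the main input until the corresponding window of the signal PCollection is complete, so the RedisIO.write step only begins after the flushing ParDo has finished and emitted its output.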
The issue was resolved after I applied the Wait.on transform, as Pablo's answer already explained. However, I had to rewrite my FlushingMemorystore.java a bit so that it returns a PCollection for the flushedSignal flag.
Here's the function:
package com.click.example.functions;
import afu.org.checkerframework.checker.nullness.qual.Nullable;
import com.google.auto.value.AutoValue;
import org.apache.beam.sdk.io.redis.RedisConnectionConfiguration;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.vendor.grpc.v1p26p0.com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.Pipeline;
public class FlushingMemorystore extends DoFn<Long, String> {
private static final Logger LOGGER = LoggerFactory.getLogger(FlushingMemorystore.class);
public static FlushingMemorystore.Read read() {
return (new AutoValue_FlushingMemorystore_Read.Builder())
.setConnectionConfiguration(RedisConnectionConfiguration.create()).build();
}
@AutoValue
public abstract static class Read extends PTransform<PCollection<Long>, PCollection<String>> {
public Read() {
}
@Nullable
abstract RedisConnectionConfiguration connectionConfiguration();
@Nullable
abstract Long expireTime();
abstract FlushingMemorystore.Read.Builder toBuilder();
public FlushingMemorystore.Read withEndpoint(String host, int port) {
Preconditions.checkArgument(host != null, "host cannot be null");
Preconditions.checkArgument(port > 0, "port cannot be negative or 0");
return this.toBuilder().setConnectionConfiguration(this.connectionConfiguration().withHost(host).withPort(port)).build();
}
public FlushingMemorystore.Read withAuth(String auth) {
Preconditions.checkArgument(auth != null, "auth cannot be null");
return this.toBuilder().setConnectionConfiguration(this.connectionConfiguration().withAuth(auth)).build();
}
public FlushingMemorystore.Read withTimeout(int timeout) {
Preconditions.checkArgument(timeout >= 0, "timeout cannot be negative");
return this.toBuilder().setConnectionConfiguration(this.connectionConfiguration().withTimeout(timeout)).build();
}
public FlushingMemorystore.Read withConnectionConfiguration(RedisConnectionConfiguration connectionConfiguration) {
Preconditions.checkArgument(connectionConfiguration != null, "connection cannot be null");
return this.toBuilder().setConnectionConfiguration(connectionConfiguration).build();
}
public FlushingMemorystore.Read withExpireTime(Long expireTimeMillis) {
Preconditions.checkArgument(expireTimeMillis != null, "expireTimeMillis cannot be null");
Preconditions.checkArgument(expireTimeMillis > 0L, "expireTimeMillis cannot be negative or 0");
return this.toBuilder().setExpireTime(expireTimeMillis).build();
}
public PCollection<String> expand(PCollection<Long> input) {
Preconditions.checkArgument(this.connectionConfiguration() != null, "withConnectionConfiguration() is required");
return input.apply(ParDo.of(new FlushingMemorystore.Read.ReadFn(this)));
}
@Setup
public Jedis setup() {
return this.connectionConfiguration().connect();
}
private static class ReadFn extends DoFn<Long, String> {
private static final int DEFAULT_BATCH_SIZE = 1000;
private final FlushingMemorystore.Read spec;
private transient Jedis jedis;
private transient Pipeline pipeline;
private int batchCount;
public ReadFn(FlushingMemorystore.Read spec) {
this.spec = spec;
}
@Setup
public void setup() {
this.jedis = this.spec.connectionConfiguration().connect();
}
@StartBundle
public void startBundle() {
this.pipeline = this.jedis.pipelined();
this.pipeline.multi();
this.batchCount = 0;
}
@ProcessElement
public void processElement(@Element Long count, OutputReceiver<String> out) {
batchCount++;
if(count!=null && count > 0) {
if (pipeline.isInMulti()) {
pipeline.exec();
pipeline.sync();
jedis.flushDB();
LOGGER.info("*****The memorystore is flushed*****");
}
out.output("SUCCESS");
} else {
LOGGER.info("No Records are there in the input file");
out.output("FAILURE");
}
}
@FinishBundle
public void finishBundle() {
if (this.pipeline.isInMulti()) {
this.pipeline.exec();
this.pipeline.sync();
}
this.batchCount=0;
}
@Teardown
public void teardown() {
this.jedis.close();
}
}
@AutoValue.Builder
abstract static class Builder {
Builder() {
}
abstract FlushingMemorystore.Read.Builder setExpireTime(Long expireTimeMillis);
abstract FlushingMemorystore.Read build();
abstract FlushingMemorystore.Read.Builder setConnectionConfiguration(RedisConnectionConfiguration connectionConfiguration);
}
}
}

Seek bar not working when playing MP3 song from server

In my app I am trying to play audio from a server with MediaPlayer, along with a seek bar. When I play a song from the server the app works, but the seek bar does not move and does not display the MediaPlayer progress. It also plays multiple songs at the same time.
I need a solution for these two bugs.
import android.media.MediaPlayer;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.DividerItemDecoration;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import android.widget.Button;
import android.widget.SeekBar;
import java.io.IOException;
import java.util.ArrayList;
public class MainActivity2 extends AppCompatActivity {
private ArrayList<SongInfo> _songs = new ArrayList<SongInfo>();
RecyclerView recyclerView;
SeekBar seekBar;
SongAdapter songAdapter;
MediaPlayer mediaPlayer;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main2);
recyclerView = (RecyclerView) findViewById(R.id.recyclerView);
seekBar = (SeekBar) findViewById(R.id.seekBar);
SongInfo s = new SongInfo("Cheap Thrills", "sia", "http://176.126.236.250/33Mmt/music/hindi/movies/new/oh_my_god/Go-Go-Govinda_(webmusic.in).mp3");
_songs.add(s);
s = new SongInfo("Cheap Thrills", "sia", "http://176.126.236.250/33Mmt/music/hindi/movies/new/oh_my_god/Go-Go-Govinda_(webmusic.in).mp3");
_songs.add(s);
songAdapter = new SongAdapter(this, _songs);
recyclerView.setAdapter(songAdapter);
LinearLayoutManager linearLayoutManager = new LinearLayoutManager(this);
DividerItemDecoration dividerItemDecoration = new DividerItemDecoration(recyclerView.getContext(),
linearLayoutManager.getOrientation());
recyclerView.addItemDecoration(dividerItemDecoration);
recyclerView.setLayoutManager(linearLayoutManager);
recyclerView.setAdapter(songAdapter);
songAdapter.setOnItemClickListener(new SongAdapter.OnItemClickListener() {
@Override
public void onItemClick(final Button b, View view, SongInfo obj, int position) {
try {
if (b.getText().toString().equals("stop")) {
b.setText("Play");
mediaPlayer.stop();
mediaPlayer.reset();
mediaPlayer.release();
mediaPlayer = null;
}else {
mediaPlayer = new MediaPlayer();
mediaPlayer.setDataSource(obj.getSongUrl());
mediaPlayer.prepareAsync();
mediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
#Override
public void onPrepared(MediaPlayer mp) {
mp.start();
b.setText("stop");
}
});
}
} catch (IOException e) {
}
}
});
}
}
This is my SongAdapter code:
package com.a03.dip.kaliprasadbengalisongs;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import java.util.ArrayList;
public class SongAdapter extends RecyclerView.Adapter<SongAdapter.SongHolder> {
ArrayList<SongInfo> _songs;
Context context;
OnItemClickListener mOnItemClickListener;
SongAdapter(Context context, ArrayList<SongInfo> songs) {
this.context = context;
this._songs = songs;
}
public interface OnItemClickListener {
void onItemClick(Button b ,View view, SongInfo obj, int position);
}
public void setOnItemClickListener(final OnItemClickListener mItemClickListener) {
this.mOnItemClickListener = mItemClickListener;
}
@Override
public SongHolder onCreateViewHolder(ViewGroup viewGroup, int i) {
View myView = LayoutInflater.from(context).inflate(R.layout.row_song,viewGroup,false);
return new SongHolder(myView);
}
@Override
public void onBindViewHolder(final SongHolder songHolder, final int i) {
final SongInfo c = _songs.get(i);
songHolder.songName.setText(_songs.get(i).songName());
songHolder.artistName.setText(_songs.get(i).artistName());
songHolder.btnAction.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mOnItemClickListener != null) {
mOnItemClickListener.onItemClick(songHolder.btnAction,v, c, i);
}
}
});
}
@Override
public int getItemCount() {
return _songs.size();
}
public class SongHolder extends RecyclerView.ViewHolder {
TextView songName,artistName;
Button btnAction;
public SongHolder(View itemView) {
super(itemView);
songName = (TextView) itemView.findViewById(R.id.tvSongName);
artistName = (TextView) itemView.findViewById(R.id.tvArtistName);
btnAction = (Button) itemView.findViewById(R.id.btnPlay);
}
}
}
And here is the SongInfo class:
package com.a03.dip.kaliprasadbengalisongs;
import android.media.MediaPlayer;
public class SongInfo {
public String songName ,artistName,songUrl;
public SongInfo() {
}
public SongInfo(String songName, String artistName, String songUrl) {
this.songName = songName;
this.artistName = artistName;
this.songUrl = songUrl;
}
public String songName() {
return songName;
}
public String artistName() {
return artistName;
}
public String getSongUrl() {
return songUrl;
}
}
You have to use a SeekBar listener in your Activity:
seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
if (fromUser) {
mediaPlayer.seekTo(progress);
}
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
}
});
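That listener only handles user-initiated seeks; the bar still will not advance on its own, and it needs a maximum. Below is a minimal sketch of one common approach, assuming the mediaPlayer and seekBar fields from the question (requires import android.os.Handler):
// Sketch: poll the MediaPlayer position so the SeekBar advances during playback.
private final Handler progressHandler = new Handler();
private final Runnable progressRunnable = new Runnable() {
@Override
public void run() {
if (mediaPlayer != null && mediaPlayer.isPlaying()) {
seekBar.setProgress(mediaPlayer.getCurrentPosition());
}
progressHandler.postDelayed(this, 500); // re-check twice per second
}
};
// In onPrepared(), after mp.start():
// seekBar.setMax(mp.getDuration());
// progressHandler.post(progressRunnable);
For the second bug (multiple songs playing at once), release any existing player before creating a new one in the click listener: if (mediaPlayer != null) { mediaPlayer.release(); mediaPlayer = null; }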

Displaying Multiple Images on a Single Google Glass Live Card

I'm creating a live card app that receives PNGs from a PHP script running on my server in response to a request from scanning QR codes. At the moment I simply replace the image on my live card with the PNG I receive from the server, but I would like to receive and display multiple images from the server with each request.
Is there an approved way to show multiple images on a live card? I was thinking there may be a possibility of generating a menu full of images that simply closed itself when clicked, but it seems like there might be a better alternative.
This is my code at the moment:
import com.google.android.glass.timeline.LiveCard;
import com.google.android.glass.timeline.LiveCard.PublishMode;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Binder;
import android.os.IBinder;
import android.util.Base64;
import android.widget.RemoteViews;
public class iotSplashScreen extends Service {
private static final String LIVE_CARD_TAG = "iotSplashScreen";
private LiveCard mLiveCard;
private RemoteViews mLiveCardView;
public class iotBinder extends Binder {
public void changeImage(String change) {
try {
byte[] bob = Base64.decode(change, Base64.DEFAULT);
Bitmap bitmap = BitmapFactory.decodeByteArray(bob, 0, bob.length);
if(bitmap != null) {
mLiveCardView.setImageViewBitmap(R.id.image_view_id, bitmap);
mLiveCard.setViews(mLiveCardView);
}
else
{
System.out.println("Daaang, dat bitmap was null doe");
}
}
catch (IllegalArgumentException e)
{
System.out.println("Base64 had an issues: " + e);
System.out.println(change);
}
catch (NullPointerException e)
{
System.out.println("Null Pointer: " + e);
}
}
}
private final iotBinder mBinder = new iotBinder();
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
if (mLiveCard == null) {
mLiveCard = new LiveCard(this, LIVE_CARD_TAG);
mLiveCardView = new RemoteViews(getPackageName(), R.layout.iot_splash_screen);
mLiveCard.setViews(mLiveCardView);
// Display the options menu when the live card is tapped.
Intent menuIntent = new Intent(this, LiveCardMenuActivity.class);
mLiveCard.setAction(PendingIntent.getActivity(this, 0, menuIntent, 0));
mLiveCard.publish(PublishMode.REVEAL);
} else {
mLiveCard.navigate();
}
return START_STICKY;
}
@Override
public void onDestroy() {
if (mLiveCard != null && mLiveCard.isPublished()) {
mLiveCard.unpublish();
mLiveCard = null;
}
super.onDestroy();
}
}
Simply add more ImageViews, either in your layout file (iot_splash_screen) or programmatically.
With the resource IDs of your ImageViews, you can call setImageViewResource on each one.
Make sure that you are setting these images before calling setViews on your Live Card.
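For example, a minimal sketch of the changeImage path extended to two bitmaps; image_view_one and image_view_two are hypothetical IDs that would need to exist as ImageViews in R.layout.iot_splash_screen:
// Sketch: push two bitmaps into the same RemoteViews, then re-apply the views.
// The two view IDs below are invented for illustration.
mLiveCardView.setImageViewBitmap(R.id.image_view_one, firstBitmap);
mLiveCardView.setImageViewBitmap(R.id.image_view_two, secondBitmap);
mLiveCard.setViews(mLiveCardView); // apply both images to the live card at once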

Google Glass and SpeechRecognizer class

I've been trying to use the SpeechRecognizer class in an activity on Google Glass. I ran this code on a Motorola Razr and it works well, but I have not been successful on Glass.
"no selected voice recognition service" is the error I get back when sr.startListening(intent) is called.
I am aware of the startActivityForResult approach to voice recognition; however, I'm looking for something that will run within my Activity. Thanks.
public class MainActivity extends Activity {
SpeechRecognizer sr;
TextView mText;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
sr = SpeechRecognizer.createSpeechRecognizer(this);
sr.setRecognitionListener(new listener());
mText = (TextView) findViewById(R.id.resultsText);
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,this.getPackageName());
intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS,5);
sr.startListening(intent);
}
class listener implements RecognitionListener
{
private static final String TAG = "Speech";
public void onReadyForSpeech(Bundle params)
{
Log.d(TAG, "onReadyForSpeech");
}
public void onBeginningOfSpeech()
{
Log.d(TAG, "onBeginningOfSpeech");
}
public void onRmsChanged(float rmsdB)
{
Log.d(TAG, "onRmsChanged");
}
public void onBufferReceived(byte[] buffer)
{
Log.d(TAG, "onBufferReceived");
}
public void onEndOfSpeech()
{
Log.d(TAG, "onEndofSpeech");
}
public void onError(int error)
{
Log.d(TAG, "error " + error);
mText.setText("error " + error);
}
public void onResults(Bundle results)
{
String str = new String();
Log.d(TAG, "onResults " + results);
ArrayList data = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
for (int i = 0; i < data.size(); i++)
{
Log.d(TAG, "result " + data.get(i));
str += data.get(i);
}
mText.setText("results: "+String.valueOf(data.size()));
}
public void onPartialResults(Bundle partialResults)
{
Log.d(TAG, "onPartialResults");
}
public void onEvent(int eventType, Bundle params)
{
Log.d(TAG, "onEvent " + eventType);
}
}
}
This use case of SpeechRecognizer is not yet supported; right now, you can only launch an activity using RecognizerIntent to transcribe speech.
Please feel free to follow issue 245, which covers this, on our issue tracker so that you can stay updated as the GDK evolves!
If you still have this problem, you should use RecognizerIntent.ACTION_RECOGNIZE_SPEECH. An example implementation is:
private static final int SPEECH_REQUEST = 0;
// shows the microphone and lets the user speak to capture speech
private void displaySpeechRecognizer() {
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
startActivityForResult(intent, SPEECH_REQUEST);
}
@Override
protected void onActivityResult(int requestCode, int resultCode,
Intent data) {
if (requestCode == SPEECH_REQUEST && resultCode == RESULT_OK) {
List<String> results = data.getStringArrayListExtra(
RecognizerIntent.EXTRA_RESULTS);
// the first string in the results list is considered the best match.
String spokenText = results.get(0);
// Do something with spokenText.
}
super.onActivityResult(requestCode, resultCode, data);
}
You can read more at : https://developers.google.com/glass/develop/gdk/voice?hl=en#starting_speech_recognition and http://developer.android.com/reference/android/speech/RecognizerIntent.html#ACTION_RECOGNIZE_SPEECH

JMF MP3 files not streaming over RTP

The MP3 player gives this error:
RTP Handler internal error: javax.media.ControllerErrorEvent[source=com.sun.media.content.unknown.Handler@baf4ae,message=Internal module com.sun.media.BasicRendererModule@197f158: failed to handle a data format change!]
I am running the server as: java MediaConverterExample rtp://rajneesh-pc:49150/audio Dead_End.mp3
and the client as: java PlayerExample rtp://rajneesh-pc:49150/audio
This is the server-side code:
import javax.media.*;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.net.MalformedURLException;
import javax.media.protocol.*;
import javax.media.format.AudioFormat;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
class MediaConvertion
{
private MediaLocator mediaLocator = null;
private DataSink dataSink = null;
private Processor mediaProcessor = null;
private static final Format[] FORMATS = new Format[] { new AudioFormat(AudioFormat.MPEG_RTP)};
private static final ContentDescriptor CONTENT_DESCRIPTOR =new ContentDescriptor (ContentDescriptor.RAW_RTP);
public MediaConvertion(String url)throws IOException,NoProcessorException, CannotRealizeException, NoDataSinkException, NoDataSinkException
{
mediaLocator=new MediaLocator(url);
}
public void setDataSource(DataSource ds) throws IOException,NoProcessorException, CannotRealizeException, NoDataSinkException {
mediaProcessor = Manager.createRealizedProcessor(new ProcessorModel(ds, FORMATS, CONTENT_DESCRIPTOR));
dataSink = Manager.createDataSink(mediaProcessor.getDataOutput(),mediaLocator);
}
public void startTransmitting() throws IOException {
mediaProcessor.start();
dataSink.open();
dataSink.start();
}
public void stopTransmitting() throws IOException {
dataSink.stop();
dataSink.close();
mediaProcessor.stop();
mediaProcessor.close();
}
}
public class MediaConverterExample extends Frame implements ActionListener
{
Button st_stream;
static MediaConvertion mdcon;
public static void main(String args[])throws IOException,NoProcessorException, CannotRealizeException, NoDataSinkException,MalformedURLException,NoDataSourceException
{
Format input1 = new AudioFormat(AudioFormat.MPEGLAYER3);
Format input2 = new AudioFormat(AudioFormat.MPEG);
Format output = new AudioFormat(AudioFormat.LINEAR);
PlugInManager.addPlugIn(
"com.sun.media.codec.audio.mp3.JavaDecoder",
new Format[]{input1, input2},
new Format[]{output},
PlugInManager.CODEC
);
File mediaFile = new File(args[1]);
DataSource source = Manager.createDataSource(new MediaLocator(mediaFile.toURL()));
mdcon=new MediaConvertion(args[0]);
mdcon.setDataSource(source);
new MediaConverterExample();
}
public MediaConverterExample()
{
st_stream=new Button("Start Streaming");
add(st_stream);
st_stream.addActionListener(this);
setVisible(true);
setSize(200,300);
}
public void actionPerformed(ActionEvent ae)
{
try
{
mdcon.startTransmitting();
}
catch(Exception e){
}
}
}
This is the client-side code:
import javax.media.*;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.net.MalformedURLException;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import javax.media.format.*;
public class PlayerExample extends JFrame implements ActionListener
{
Button play;
SimpleAudioPlayer sap;
PlayerExample(String playFile) throws IOException,NoPlayerException, CannotRealizeException
{
sap=new SimpleAudioPlayer(new MediaLocator(playFile));
setLayout(new BorderLayout());
//add(sap.VideoComponent(),BorderLayout.CENTER);
//add(sap.AudioComponent(),BorderLayout.WEST);
//add(sap.ControlComponent(),BorderLayout.NORTH);
add(play=new Button("play"),BorderLayout.SOUTH);
play.addActionListener(this);
setSize(200,300);
setVisible(true);
}
public void actionPerformed(ActionEvent ae)
{
sap.play();
}
public static void main(String args[])throws IOException,NoPlayerException, CannotRealizeException
{
Format input1 = new AudioFormat(AudioFormat.MPEGLAYER3);
Format input2 = new AudioFormat(AudioFormat.MPEG);
Format output = new AudioFormat(AudioFormat.LINEAR);
PlugInManager.addPlugIn(
"com.sun.media.codec.audio.mp3.JavaDecoder",
new Format[]{input1, input2},
new Format[]{output},
PlugInManager.CODEC
);
new PlayerExample(args[0]);
}
}
class SimpleAudioPlayer {
private Player videoPlayer = null;
public SimpleAudioPlayer(MediaLocator ml) throws IOException, NoPlayerException, CannotRealizeException {
videoPlayer = Manager.createRealizedPlayer(ml);
}
public void play() {
//videoPlayer.deallocate();
videoPlayer.start();
}
public void stop() {
videoPlayer.stop();
}
public Component VideoComponent(){
return videoPlayer.getVisualComponent();
}
public Component ControlComponent(){
return videoPlayer.getControlPanelComponent();
}
public Component AudioComponent(){
return videoPlayer.getGainControl().getControlComponent();
}
}
Please help; I did everything I could. Please advise me of any other details I might be missing. The server runs fine, but the client blocks at player creation. The audio format I am using is MPEG_RAW. Eagerly looking for an answer, thanks in advance.
SOLVED.
I figured it out when I looked at the Oracle forum. Change:
AudioFormat.MPEG_RTP to AudioFormat.DVI_RTP
Don't ask me why MP3 didn't work; I did everything I could. Please tell me why the previous format didn't work.
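For clarity, that is a one-line change to the FORMATS array in the server's MediaConvertion class shown above (a sketch of the before/after; DVI_RTP has JMF transcode the MP3 to DVI ADPCM for the RTP session):
// Before (fails with "failed to handle a data format change!"):
// private static final Format[] FORMATS = new Format[] { new AudioFormat(AudioFormat.MPEG_RTP) };
// After (streams successfully):
private static final Format[] FORMATS = new Format[] { new AudioFormat(AudioFormat.DVI_RTP) };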