In my app, I'm using a RenderTexture as a canvas.
I would like to save this canvas to the Android photo gallery, like this:
The C++ side:
void Canvas::saveCanvasCallback(cocos2d::Ref *sender)
{
time_t t = time(0);
struct tm * timeinfo= localtime (&t);
char buffer[120];
strftime(buffer,120,"image_%d%m%Y%I%M%S.png",timeinfo);
auto callback = [&](RenderTexture* rt, const std::string& path)
{
#if CC_TARGET_PLATFORM == CC_PLATFORM_ANDROID
addImageToGallery(path.c_str());
#else
CGameManager_iOSBridge::addImageToGallery(path.c_str());
#endif
};
_target->saveToFile(buffer, Image::Format::PNG, true, callback);
CCLOG("Image saved %s \n", buffer);
}
The Java side:
static void addImageToGallery (String path) {
ContentValues values = new ContentValues();
values.put(Images.Media.DATE_ADDED, System.currentTimeMillis());
values.put(Images.Media.DATE_TAKEN, System.currentTimeMillis());
values.put(Images.Media.MIME_TYPE, "image/png");
values.put(MediaStore.MediaColumns.DATA, Environment.getExternalStorageDirectory().getPath()+path);
getContentResolver().insert(Images.Media.EXTERNAL_CONTENT_URI, values);
me.runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(me.getApplicationContext(), "Image saved ",
Toast.LENGTH_LONG).show();
}
});
}
The saved image is black (empty) in the gallery (on iOS it works fine).
I'm working with cocos2d-x v3.3.
Please help me.
Thanks.
I am using a custom WebView to render my HTML data in the iOS app. I am looking for the same custom renderer on Android and Windows.
MyWebView.cs
public class MyWebView : WebView
{
public static readonly BindableProperty UrlProperty = BindableProperty.Create(
propertyName: "Url",
returnType: typeof(string),
declaringType: typeof(MyWebView),
defaultValue: default(string));
public string Url
{
get { return (string)GetValue(UrlProperty); }
set { SetValue(UrlProperty, value); }
}
}
MyWebViewRenderer.cs on ios
[assembly: ExportRenderer(typeof(MyWebView), typeof(MyWebViewRenderer))]
namespace MyApp.iOS.Renderer
{
public class MyWebViewRenderer : ViewRenderer<MyWebView, WKWebView>
{
WKWebView _wkWebView;
protected override void OnElementChanged(ElementChangedEventArgs<MyWebView> e)
{
base.OnElementChanged(e);
if (Control == null)
{
var config = new WKWebViewConfiguration();
_wkWebView = new WKWebView(Frame, config);
_wkWebView.NavigationDelegate = new MyNavigationDelegate();
SetNativeControl(_wkWebView);
}
}
public class MyNavigationDelegate : WKNavigationDelegate
{
public override void DidFinishNavigation(WKWebView webView, WKNavigation navigation)
{
string fontSize = "";
if (Device.Idiom == TargetIdiom.Phone)
{
fontSize = "250%";
}
else if (Device.Idiom == TargetIdiom.Tablet)
{
fontSize = "375%";
}
string stringsss = String.Format(@"document.getElementsByTagName('body')[0].style.webkitTextSizeAdjust= '{0}'", fontSize);
WKJavascriptEvaluationResult handler = (NSObject result, NSError err) =>
{
if (err != null)
{
System.Console.WriteLine(err);
}
if (result != null)
{
System.Console.WriteLine(result);
}
};
webView.EvaluateJavaScript(stringsss, handler);
}
}
protected override void OnElementPropertyChanged(object sender, PropertyChangedEventArgs e)
{
base.OnElementPropertyChanged(sender, e);
if (e.PropertyName == "Url")
{
Control.LoadHtmlString(Element.Url, null);
}
}
}
}
XAML and XAML.cs
<local:MyWebView
    x:Name="web_view" />
In the code-behind:
web_view.Url = "htmldata";
Output screenshot on an iOS device:
Sample HTML data was added here. I need the same output on the Android and Windows platforms, so I am requesting custom renderer code for Android and Windows.
If you want to present a string of HTML defined dynamically in code, you'll need to create an instance of HtmlWebViewSource:
var htmlSource = new HtmlWebViewSource();
htmlSource.Html = @"copy the html string here";
web_view.Source = htmlSource;
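For the Android side, the same custom renderer pattern as the iOS one can be used. Below is a minimal sketch, not taken from the original post: it assumes Xamarin.Forms, an Android project namespace of MyApp.Droid, and that the HTML string arrives through the same Url bindable property; adjust the using for the namespace that contains MyWebView to match your shared project.
using Android.Content;
using System.ComponentModel;
using Xamarin.Forms;
using Xamarin.Forms.Platform.Android;
using MyApp.Droid.Renderer;
// also add a using for the namespace that contains MyWebView

[assembly: ExportRenderer(typeof(MyWebView), typeof(MyWebViewRenderer))]
namespace MyApp.Droid.Renderer
{
    public class MyWebViewRenderer : ViewRenderer<MyWebView, Android.Webkit.WebView>
    {
        public MyWebViewRenderer(Context context) : base(context) { }

        protected override void OnElementChanged(ElementChangedEventArgs<MyWebView> e)
        {
            base.OnElementChanged(e);
            if (Control == null)
            {
                var webView = new Android.Webkit.WebView(Context);
                // only needed if you inject a script, as the iOS renderer does
                webView.Settings.JavaScriptEnabled = true;
                SetNativeControl(webView);
            }
        }

        protected override void OnElementPropertyChanged(object sender, PropertyChangedEventArgs e)
        {
            base.OnElementPropertyChanged(sender, e);
            if (e.PropertyName == "Url")
            {
                // Element.Url holds the HTML string, as in the iOS renderer
                Control.LoadDataWithBaseURL(null, Element.Url, "text/html", "UTF-8", null);
            }
        }
    }
}
A UWP renderer would follow the same pattern, using ViewRenderer<MyWebView, Windows.UI.Xaml.Controls.WebView> and NavigateToString to load the HTML string.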
I am trying to replicate a simple addition of two entries with a button in MonoDevelop (I was shown how to make it step by step), but somehow the window closes about two seconds after pressing the button without actually changing anything.
The code for the button:
using System;
using Gtk;
using Frontend.ChatService;
public partial class MainWindow : Gtk.Window
{
public MainWindow() : base(Gtk.WindowType.Toplevel)
{
Build();
}
protected void OnDeleteEvent(object sender, DeleteEventArgs a)
{
Application.Quit();
a.RetVal = true;
}
protected void OnButton1Clicked(object sender, EventArgs e)
{
ChatService client = new ChatService();
int x = Int32.Parse(entry1.Text);
int y = Int32.Parse(entry2.Text);
int sum = client.Add(x, y);
entry1.Text = sum.ToString();
}
}
And the web service that does the sum (which I tested and think works):
using System;
using System.Web;
using System.Web.Services;
namespace Backend
{
public class ChatService : System.Web.Services.WebService
{
[WebMethod]
public int Add(int x, int y)
{
return x + y;
}
}
}
I left the main file, Program.cs, as generated:
using System;
using Gtk;
namespace Frontend
{
class MainClass
{
public static void Main(string[] args)
{
Application.Init();
MainWindow win = new MainWindow();
win.Show();
Application.Run();
}
}
}
The window does pop up as it should and shows no problem until the button is pressed.
Edit:
I forgot to run the backend/server part, which is why the web method was not found (a beginner's mistake, I guess).
It works now.
The problem is probably that your code throws an exception you are not aware of, in the code that handles the button click.
protected void OnButton1Clicked(object sender, EventArgs e)
{
ChatService client = new ChatService();
int x = Int32.Parse(entry1.Text);
int y = Int32.Parse(entry2.Text);
int sum = client.Add(x, y);
entry1.Text = sum.ToString();
}
Let's go line by line:
ChatService client = new ChatService();
Here you are creating a new instance of what seems to be a system service, or maybe a web service. This could throw if the service is not known (in the former case), or if the connection is interrupted or never reaches its destination (in the latter case).
These lines are also delicate:
int x = Int32.Parse(entry1.Text);
int y = Int32.Parse(entry2.Text);
They will throw if entry1 or entry2 is empty, or contains a letter, and so on.
To handle these cases you need to add try...catch blocks in the appropriate places. Alternatively, you can use Int32.TryParse.
For example, assuming the service is in the web:
protected void OnButton1Clicked(object sender, EventArgs e)
{
ChatService client;
int x;
int y;
try {
client = new ChatService();
} catch(HttpRequestException exc) {
client = null;
var dlg = new Gtk.MessageDialog(
this,
Gtk.DialogFlags.Modal,
Gtk.MessageType.Error,
Gtk.ButtonsType.Ok,
"Connection error"
);
dlg.Text = exc.Message;
dlg.Run();
dlg.Destroy();
}
if ( client != null ) {
if ( !int.TryParse( entry1.Text, out x) ) {
entry1.Text = "0";
x = 0;
}
if ( !int.TryParse( entry2.Text, out y) ) {
entry2.Text = "0";
y = 0;
}
int sum = client.Add(x, y);
entry1.Text = sum.ToString();
}
}
Writing code that correctly handles errors is always harder, of course.
Hope this helps.
I developed an application using Xamarin in Visual Studio 2017 where I need to get the current location of the phone when the user turns on Location on their Android device, and then mark that location on a Google Map using MarkerOptions. For this I followed the steps in this video: https://www.youtube.com/watch?v=rCZN1c2azyE, but I did not get the current location on the map.
Below is my code:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Android.App;
using Android.Content;
using Android.OS;
using Android.Runtime;
using Android.Views;
using Android.Widget;
using Android.Gms.Maps;
using Android.Locations;
using Android.Util;
using Android.Gms.Maps.Model;
namespace smvdappdev
{
[Activity(Label = "LOCATION MAP")]
//for google map, for gps location
public class UserLocationMap_Act : Activity, IOnMapReadyCallback, ILocationListener
{
//Map variable
private GoogleMap gooMap;
//Location
LocationManager locManager;
String provider;
protected override void OnCreate(Bundle savedInstanceState)
{
base.OnCreate(savedInstanceState);
// Create your application here
SetContentView(Resource.Layout.UserLocationMap);
//Back Button
ActionBar.SetDisplayHomeAsUpEnabled(true);
//Method for Map
SetUpMap();
locManager = (LocationManager)GetSystemService(Context.LocationService);
provider = locManager.GetBestProvider(new Criteria(), false);
Location location = locManager.GetLastKnownLocation(provider);
if (location == null)
{
System.Diagnostics.Debug.WriteLine("No location available!");
}
}
public override bool OnOptionsItemSelected(IMenuItem item)
{
switch (item.ItemId)
{
case Android.Resource.Id.Home:
Finish();
return true;
default:
return base.OnOptionsItemSelected(item);
}
}
//for setting map on fragment placed on activity page
private void SetUpMap()
{
if (gooMap == null)
{
FragmentManager.FindFragmentById<MapFragment>(Resource.Id.fragment1).GetMapAsync(this);
}
}
//to draw map on map display view
public void OnMapReady(GoogleMap googleMap)
{
this.gooMap = googleMap;
//LatLng latlng = new LatLng()
//MarkerOptions mo = new MarkerOptions();
//mo.SetPosition(new LatLng(Convert.ToDouble(32.73), Convert.ToDouble(74.86)));
//mo.SetTitle("Civil Secretarait Jammu");
//googleMap.AddMarker(mo);
googleMap.UiSettings.CompassEnabled = true;
googleMap.UiSettings.ZoomControlsEnabled = true;
googleMap.MoveCamera(CameraUpdateFactory.ZoomIn());
//throw new NotImplementedException();
}
//*** Here all code for getting location via GPS
protected override void OnResume()
{
base.OnResume();
provider = LocationManager.GpsProvider;
//if (locManager.IsProviderEnabled(provider))
//{
locManager.RequestLocationUpdates(provider, 2000, 1, this);
//}
//else
//{
// Log.Info(tag, provider + " is not available. Does the device have location services enabled?");
//}
}
protected override void OnPause()
{
base.OnPause();
locManager.RemoveUpdates(this);
}
public void OnProviderEnabled(string provider)
{
}
public void OnProviderDisabled(string provider)
{
}
public void OnStatusChanged(string provider, Availability status, Bundle extras)
{
}
public void OnLocationChanged(Location location)
{
Double lat, lng;
lat = location.Latitude;
lng = location.Longitude;
MarkerOptions mo = new MarkerOptions();
mo.SetPosition(new LatLng(lat, lng));
//Toast.MakeText(this, "Latitude:" + lat.ToString() + ", Longitude:" + lng.ToString(), ToastLength.Long).Show();
mo.SetTitle("You are here!");
gooMap.AddMarker(mo);
CameraPosition.Builder builder = CameraPosition.InvokeBuilder();
builder.Target(new LatLng(lat, lng));
CameraPosition camPos = builder.Build();
CameraUpdate camUpdate = CameraUpdateFactory.NewCameraPosition(camPos);
gooMap.MoveCamera(camUpdate);
}
}
}
You can just use gooMap.MyLocationEnabled = true. You need to check and request the location permission first for this to work.
With MyLocationEnabled set to true, the map will show a precision circle and a blue dot marking where you are.
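As a rough sketch of that permission check (assuming the Android.Support compat packages; the request code and the choice to do the check in OnMapReady are illustrative, not taken from the question):
using Android;
using Android.Content.PM;
using Android.Support.V4.App;
using Android.Support.V4.Content;

// inside the existing activity (UserLocationMap_Act in the question)
const int RequestLocationId = 0; // arbitrary request code

public void OnMapReady(GoogleMap googleMap)
{
    gooMap = googleMap;
    if ((int)ContextCompat.CheckSelfPermission(this, Manifest.Permission.AccessFineLocation)
            == (int)Permission.Granted)
    {
        // shows the blue dot and the precision circle
        gooMap.MyLocationEnabled = true;
    }
    else
    {
        // ask the user; enable the layer in OnRequestPermissionsResult once granted
        ActivityCompat.RequestPermissions(this,
            new string[] { Manifest.Permission.AccessFineLocation }, RequestLocationId);
    }
}
ACCESS_FINE_LOCATION (or ACCESS_COARSE_LOCATION) still has to be declared in the manifest; the runtime request is only needed on Android 6.0 and above.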
I record some audio using the libGDX AudioRecorder, which returns a short[]. I think I have converted it into a byte[] so it can be stored in a file, because I need to be able to play the audio back at any time.
Here are some variables I use:
final int samples = 44100;
boolean isMono = true;
final short[] data = new short[samples * 5];
final AudioRecorder recorder = Gdx.audio.newAudioRecorder(samples, isMono);
final AudioDevice player = Gdx.audio.newAudioDevice(samples, isMono);
And here I start recording and play the audio; I also think I've converted the short[] into a byte[], but correct me if I'm wrong.
public void startRecord(){
new Thread(new Runnable() {
@Override
public void run() {
System.out.println("Record start:");
recorder.read(data, 0, data.length);
recorder.dispose();
ShortBuffer buffer = BufferUtils.newShortBuffer(2);
buffer.put(data[1]);
}
}).start();
}
public void playRecorded(){
new Thread(new Runnable() {
@Override
public void run() {
player.writeSamples(data, 0, data.length);
player.dispose();
}
}).start();
}
Here's an example of how I've stored a byte[] before, but I can't apply this technique to this method. Bear in mind I need it to be a libGDX solution.
public void onImagePicked(final InputStream stream) {
loading = "Loading";
pending = executor.submit(new AsyncTask<Pixmap>() {
@Override
public Pixmap call() throws Exception {
StreamUtils.copyStream(stream, file.write(false));
final byte[] bytes = file.readBytes();
final Pixmap pix = new Pixmap(bytes, 0, bytes.length);
return pix;
}
});
}
It is actually very simple. You can convert a byte array to a short array like this:
// get all bytes
byte[] temp = ...
// create short with half the length (short = 2 bytes)
short[] data = new short[temp.length / 2];
// cast a byte array to short array
ByteBuffer.wrap(temp).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(data);
//data now has the short array in it
You can convert a short array to a byte array like this:
// audio data
short[] data = ...;
//create a byte array to hold the data passed (short = 2 bytes)
byte[] temp = new byte[data.length * 2];
// cast a short array to byte array
ByteBuffer.wrap(temp).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().put(data);
// temp now has the byte array
Sample project
I created a sample project on GitHub that you can clone to see how this works.
Sample project here
However, here is the Game class from the sample project if you only want to take a quick look at it:
package com.leonziyo.recording;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.audio.AudioDevice;
import com.badlogic.gdx.audio.AudioRecorder;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.utils.GdxRuntimeException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public class MainGame extends ApplicationAdapter {
boolean recordTurn = true;
final int samples = 44100;
boolean isMono = true, recording = false, playing = false;
@Override
public void create () {}
@Override
public void render () {
/*Changing the color just to know when it is done recording or playing audio (optional)*/
if(recording)
Gdx.gl.glClearColor(1, 0, 0, 1);
else if(playing)
Gdx.gl.glClearColor(0, 1, 0, 1);
else
Gdx.gl.glClearColor(0, 0, 1, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
// We trigger recording and playing with touch for simplicity
if(Gdx.input.justTouched()) {
if(recordTurn)
recordToFile("sound.bin", 3); //pass file name and number of seconds to record
else
playFile("sound.bin"); //file name to play
recordTurn = !recordTurn;
}
}
@Override
public void dispose() {
}
private void recordToFile(final String filename, final int seconds) {
//Start a new thread to do the recording, because it will block and render won't be called if done in the main thread
new Thread(new Runnable() {
@Override
public void run() {
try {
recording = true;
short[] data = new short[samples * seconds];
AudioRecorder recorder = Gdx.audio.newAudioRecorder(samples, isMono);
recorder.read(data, 0, data.length);
recorder.dispose();
saveAudioToFile(data, filename);
}
catch(GdxRuntimeException e) {
Gdx.app.log("test", e.getMessage());
}
finally {
recording = false;
}
}
}).start();
}
private void playFile(final String filename) {
//Start a new thread to play the file, because it will block and render won't be called if done in the main thread
new Thread(new Runnable() {
@Override
public void run() {
try {
playing = true;
short[] data = getAudioDataFromFile(filename); //get audio data from file
AudioDevice device = Gdx.audio.newAudioDevice(samples, isMono);
device.writeSamples(data, 0, data.length);
device.dispose();
}
catch(GdxRuntimeException e) {
Gdx.app.log("test", e.getMessage());
}
finally {
playing = false;
}
}
}).start();
}
private short[] getAudioDataFromFile(String filename) {
FileHandle file = Gdx.files.local(filename);
byte[] temp = file.readBytes(); // get all bytes from file
short[] data = new short[temp.length / 2]; // create short with half the length (short = 2 bytes)
ByteBuffer.wrap(temp).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(data); // cast a byte array to short array
return data;
}
private void saveAudioToFile(short[] data, String filename) {
byte[] temp = new byte[data.length * 2]; //create a byte array to hold the data passed (short = 2 bytes)
ByteBuffer.wrap(temp).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().put(data); // cast a short array to byte array
FileHandle file = Gdx.files.local(filename);
file.writeBytes(temp, false); //save bytes to file
}
}
Don't forget to add the permission for audio recording, plus the external storage permission in case you are writing to external storage:
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
I used setString but the string is not updated, so I have to use a CCLabel to show the string, which feels very weird because showing the user input should be part of the text field. Did I miss anything?
I read the test_input example; it uses a CCLabel to show the user input, which I think is a really bad design.
You don't need a label to show user input.
I have edited the default HelloWorld files to add a working CCTextFieldTTF example.
This is how your HelloWorld.h file should look:
class HelloWorld : public cocos2d::CCLayer, public cocos2d::CCTextFieldDelegate
{
public:
virtual bool init();
static cocos2d::CCScene* scene();
void createTF();
cocos2d::CCTextFieldTTF* tf;
virtual bool ccTouchBegan(CCTouch *pTouch, CCEvent *pEvent);
virtual void ccTouchEnded(CCTouch *pTouch, CCEvent *pEvent);
virtual void registerWithTouchDispatcher();
//CCtextFieldttf delegate begin
virtual bool onTextFieldAttachWithIME(CCTextFieldTTF * pSender);
virtual bool onTextFieldDetachWithIME(CCTextFieldTTF * pSender);
virtual bool onTextFieldInsertText(CCTextFieldTTF * pSender, const char * text, int nLen);
virtual bool onTextFieldDeleteBackward(CCTextFieldTTF * pSender, const char * delText, int nLen);
virtual bool onDraw(CCTextFieldTTF * pSender);
//CCtextFieldttf delegate end
CREATE_FUNC(HelloWorld);
};
This is how your HelloWorld.cpp should look for minimal usage of CCTextFieldTTF:
CCScene* HelloWorld::scene()
{
// 'scene' is an autorelease object
CCScene *scene = CCScene::create();
// 'layer' is an autorelease object
HelloWorld *layer = HelloWorld::create();
// add layer as a child to scene
scene->addChild(layer);
// return the scene
return scene;
}
// on "init" you need to initialize your instance
bool HelloWorld::init()
{
if ( !CCLayer::init() )
{
return false;
}
this->setTouchEnabled(true);
CCSize visibleSize = CCDirector::sharedDirector()->getVisibleSize();
CCPoint origin = CCDirector::sharedDirector()->getVisibleOrigin();
createTF();
CCLabelTTF* pLabel = CCLabelTTF::create("Hello World", "Arial", 24);
// position the label on the center of the screen
pLabel->setPosition(ccp(origin.x + visibleSize.width/2,
origin.y + visibleSize.height - pLabel->getContentSize().height));
this->addChild(pLabel, 1);
return true;
}
void HelloWorld::createTF()
{
tf = CCTextFieldTTF::textFieldWithPlaceHolder("123", CCSizeMake(100, 100), kCCTextAlignmentCenter, "helvetica", 20);
tf->setColorSpaceHolder(ccWHITE);
tf->setPosition(ccp(200,200));
tf->setHorizontalAlignment(kCCTextAlignmentCenter);
tf->setVerticalAlignment(kCCVerticalTextAlignmentCenter);
tf->setDelegate(this);
addChild(tf);
}
void HelloWorld::registerWithTouchDispatcher()
{
CCDirector* pDirector = CCDirector::sharedDirector();
pDirector->getTouchDispatcher()->addTargetedDelegate(this, 0, false);
}
bool HelloWorld::ccTouchBegan(CCTouch *pTouch, CCEvent *pEvent)
{
CCLog("inside touchbegan");
return true;
}
void HelloWorld::ccTouchEnded(CCTouch *pTouch, CCEvent *pEvent)
{
CCLog("inside touchend");
tf->attachWithIME();
}
bool HelloWorld::onTextFieldAttachWithIME(CCTextFieldTTF * pSender)
{
return false;
}
bool HelloWorld::onTextFieldDetachWithIME(CCTextFieldTTF * pSender)
{
return false;
}
bool HelloWorld::onTextFieldInsertText(CCTextFieldTTF * pSender, const char * text, int nLen)
{
return false;
}
bool HelloWorld::onTextFieldDeleteBackward(CCTextFieldTTF * pSender, const char * delText, int nLen)
{
return false;
}
bool HelloWorld::onDraw(CCTextFieldTTF * pSender)
{
return false;
}
The CCTextFieldDelegate methods are implemented to gain more control over what is entered into the CCTextFieldTTF. The only thing CCTextFieldTTF lacks is that you have to call its attachWithIME() method yourself; in the code above it is called in ccTouchEnded.
In the header:
class CustomMultiplayerScene : public PZGBaseMenuScene,public CCIMEDelegate
{
public:
void keyboardWillShow(cocos2d::CCIMEKeyboardNotificationInfo &info);
void keyboardWillHide(cocos2d::CCIMEKeyboardNotificationInfo &info);
void ccTouchesBegan(cocos2d::CCSet *pTouches, cocos2d::CCEvent *pEvent);
};
In the .cpp:
CCTextFieldTTF *textfield = CCTextFieldTTF::textFieldWithPlaceHolder("Enter Name:", CCSize(480,30), kCCTextAlignmentCenter, "Arial", 12);
// textfield->setAnchorPoint(CCPointZero);
textfield->setPosition(ccp(screenSize.width/2,200));
textfield->setTag(100);
this->addChild(textfield,4);
CCLabelTTF *label = CCLabelTTF::create("ID : ", "", 12);
label->setPosition(ccp(screenSize.width/2,100));
label->setTag(200);
this->addChild(label,4);
Implement the delegate methods in the .cpp:
void CustomMultiplayerScene::keyboardWillShow(CCIMEKeyboardNotificationInfo &info)
{
CCLOG("keyboardWillShow");
CCTextFieldTTF *textfield = (CCTextFieldTTF *)this->getChildByTag(100);
textfield->setString("");
}
void CustomMultiplayerScene::keyboardWillHide(CCIMEKeyboardNotificationInfo &info)
{
CCLog("keyboardWillHide");
CCTextFieldTTF *textfield = (CCTextFieldTTF *)this->getChildByTag(100);
CCLabelTTF *label = (CCLabelTTF *)this->getChildByTag(200);
label->setString(textfield->getString());
}
void CustomMultiplayerScene::ccTouchesBegan(cocos2d::CCSet *pTouches, cocos2d::CCEvent *pEvent)
{
CCTouch *pTouch = (CCTouch *)pTouches->anyObject();
CCPoint point = pTouch->getLocationInView();
point = CCDirector::sharedDirector()->convertToGL(point);
CCTextFieldTTF *textfield = (CCTextFieldTTF *)this->getChildByTag(100);
CCRect rect = textfield->boundingBox();
if(rect.containsPoint(point)) {
textfield->attachWithIME();
}
}
// Simplest alternative: create the text field with just a placeholder, a font name and a font size
CCTextFieldTTF * pTextField = CCTextFieldTTF::textFieldWithPlaceHolder("click here for input",
                                                                       "Thonburi",
                                                                       20);
addChild(pTextField);