I would like to get an ERC721 token with a specific "dna".
See the metadata below:
{
"dna": "602472F",
"name": "Test #1",
"description": "My Collectibles",
"image": "ipfs://QmasMm8v9WkU11BtnWsybDW6/1.png",
"edition": 1,
"attributes": [
{
"trait": "type",
"value": "Fire"
},
{
"trait_type": "Eyes",
"value": "Black"
}
]
}
I know how to access a token using tokenURI.
Here is my code:
string public uri;
string public uriSuffix = ".json";
function _baseURI() internal view virtual override returns (string memory) {
return uri;
}
function tokenURI(uint256 _tokenId) public view virtual override returns (string memory){
require(_exists(_tokenId), "ERC721Metadata: URI query for nonexistent token");
string memory currentBaseURI = _baseURI();
return bytes(currentBaseURI).length > 0 ? string(abi.encodePacked(currentBaseURI, _tokenId.toString(), uriSuffix)) : "";
}
Now, how can I check if a token has the dna I am looking for? Should I get this info from Opensea API or from the solidity side?
Ps: All my .json and .png files are hosted in IPFS.
EVM contracts are not able to read offchain data (the JSON file) directly. You'd need an offchain app (or an oracle provider such as Chainlink) to feed the offchain data to the contract.
So it's much easier to just query the data from an offchain app.
Example using node.js and the web3 package for querying the contract:
// assumes web3 is initialized and axios is imported
const contract = new web3.eth.Contract(abiJson, contractAddress);
const tokenURI = await contract.methods.tokenURI(tokenId).call(); // tokenURI is a view function, so .call() is needed
// note: an ipfs:// URI has to be rewritten to an HTTP gateway URL before axios can fetch it
const contents = (await axios.get(tokenURI)).data;
return contents.dna;
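If you need to find which token has a given DNA, you can loop over the token IDs off-chain and compare the dna field of each metadata file. A minimal sketch, assuming sequential token IDs starting at 1, a totalSupply() function on the contract (ERC721Enumerable), and a helper that rewrites ipfs:// URIs to an HTTP gateway (findTokenByDna and toGatewayUrl are illustrative names, not part of any library):
const axios = require("axios");

// Illustrative helper: rewrite an ipfs:// URI to an HTTP gateway URL so axios can fetch it
function toGatewayUrl(ipfsUri) {
  return ipfsUri.replace("ipfs://", "https://ipfs.io/ipfs/");
}

async function findTokenByDna(contract, targetDna) {
  // Assumes the contract exposes totalSupply() and token IDs start at 1
  const totalSupply = await contract.methods.totalSupply().call();
  for (let tokenId = 1; tokenId <= totalSupply; tokenId++) {
    const tokenURI = await contract.methods.tokenURI(tokenId).call();
    const metadata = (await axios.get(toGatewayUrl(tokenURI))).data;
    if (metadata.dna === targetDna) {
      return tokenId; // first token whose metadata matches the DNA
    }
  }
  return null; // no token with that DNA found
}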
Is there a way to check the name of the current blockchain using ethers.js?
For example
const provider = new ethers.providers.JsonRpcProvider(rpcUrl);
const chainInfo = await provider.getNetwork();
gives the chain ID and the network name, but not the blockchain name such as Ethereum, Polygon, etc.
You can use detectNetwork. You make a request to an RPC server, so the "name" property depends on that server's response. For example, if you make a request to the Binance mainnet
const provider = new ethers.providers.JsonRpcProvider(
"https://bsc-dataseed.binance.org"
);
const a = provider.detectNetwork().then((x) => console.log(x));
you will get this:
{
"name": "bnb",
"chainId": 56,
"ensAddress": null,
"_defaultProvider": null
}
But if you make a request to Avalanche
const provider = new ethers.providers.JsonRpcProvider(
"https://api.avax.network/ext/bc/C/rpc"
);
const a = provider.detectNetwork().then((x) => console.log(x));
you will get this:
{
"chainId": 43114,
"name": "unknown"
}
It really depends on how the RPC server's response is configured. You can find RPC endpoints here.
Or you can create a mapping for the most popular networks:
const NETWORKS: { [k: string]: string } = {
1: "Ethereum Main Network",
5: "Goerli Test Network",
42: "Kovan Test Network",
56: "Binance Smart Chain",
1337: "Ganache",
};
Once you get the chainId, you can return
NETWORKS[chainId]
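For example, a small helper that combines getNetwork() with the mapping above (a sketch; getBlockchainName is just an illustrative name):
const { ethers } = require("ethers");

// Look up a human-readable name for whatever chain the RPC endpoint serves
async function getBlockchainName(rpcUrl) {
  const provider = new ethers.providers.JsonRpcProvider(rpcUrl);
  const { chainId } = await provider.getNetwork();
  return NETWORKS[chainId] || "unknown (chainId " + chainId + ")";
}

getBlockchainName("https://bsc-dataseed.binance.org").then(console.log); // "Binance Smart Chain"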
I want to add a struct to an array and get the first entry from another function, but it's not working with web3. If I add the struct to the array in the same function, web3 works and returns the entry as expected:
pragma solidity >=0.7.0;
pragma experimental ABIEncoderV2;
contract Payback {
struct Address {
uint256 id;
string name;
address _address;
}
Address[] addresses;
function addAddress() external {
Address memory newAddress = Address(
1,
"Test",
0xDEE7796E89C82C36BAdd1375076f39D69FafE252
);
addresses.push(newAddress);
}
function getAddress() external view returns (Address memory) {
return addresses[0];
}
}
My test is working:
it('gets Address', async () => {
await paybackInstance.addAddress()
let value = await paybackInstance.getAddress()
assert.equal(value[0], "1")
assert.equal(value[1], "Test")
assert.equal(value[2], "0xDEE7796E89C82C36BAdd1375076f39D69FafE252")
});
But if I try to return it with web3, I get an error:
"VM Exception while processing transaction: invalid opcode"
let addresses = await contract.methods.getAddress().call()
console.log(addresses)
I guess I'm doing something wrong with storage/memory, but I don't really understand it because my test is passing...
It looks like, with web3, you are not adding the struct to the array first, so you are trying to access index 0 of an array whose length is still zero.
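A minimal sketch of the fix on the web3 side, assuming the contract instance is already set up and an unlocked account is available:
const accounts = await web3.eth.getAccounts();

// addAddress() writes to storage, so it has to be sent as a transaction, not a call
await contract.methods.addAddress().send({ from: accounts[0] });

// Now index 0 exists and getAddress() returns the struct instead of reverting
const firstAddress = await contract.methods.getAddress().call();
console.log(firstAddress);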
I'm creating a proxy contract that connects to an existing ERC-20 contract.
This proxy contract should be able to connect with MetaMask and show token balances.
Everything works fine when I add the token in MetaMask with the proxy address: it shows the symbol and decimals correctly, but not the balance, which shows zero instead.
proxy contract code:
contract Proxy {
address private _implementation;
event Upgraded(address indexed implementation);
function implementation() public view returns (address) {
return _implementation;
}
function upgradeTo(address impl) public {
_implementation = impl;
emit Upgraded(impl);
}
function () payable external {
address _impl = implementation();
require(_impl != address(0));
assembly {
let ptr := mload(0x40)
calldatacopy(ptr, 0, calldatasize)
let result := delegatecall(gas, _impl, ptr, calldatasize, 0, 0)
let size := returndatasize
returndatacopy(ptr, 0, size)
switch result
case 0 { revert(ptr, size) }
default { return(ptr, size) }
}
}
}
The function balanceOf works fine when I add the token in MetaMask with the ERC-20 contract address, but it shows zero through the proxy contract:
function balanceOf(address tokenOwner) public view returns (uint256) {
return balances[tokenOwner];
}
My efforts
For testing I wrote this function:
function test(address theAddress) public view returns (address) {
return theAddress ;
}
When I call it with the argument '0xC357c241b98B15B3A08aeC3AcD49fBC0cbD74fcE', the ERC-20 contract returns the same address, but the proxy returns this value:
0xc357c241b98b19150f7f8f1d47ad1cd500000000
Another test that I did is this function:
function test2(string memory theString) public view returns (string memory) {
return theString ;
}
This function works fine on both the proxy and the ERC-20 contract! Thanks all.
Edit 1
My test with web3.js:
var interval ;
document.addEventListener('DOMContentLoaded', function() {
interval = setInterval(run , 1000);
}, false);
function run(){
web3 = new Web3(web3.currentProvider);
console.log("call");
if(web3.eth.accounts[0] === undefined)
return;
clearInterval(interval);
console.log(web3.eth.accounts[0]);
web3.eth.defaultAccount = web3.eth.accounts[0];
var CoursetroContract = web3.eth.contract( JSON.parse(`[
{
"constant": true,
"inputs": [
{
"name": "theAddress",
"type": "address"
}
],
"name": "test",
"outputs": [
{
"name": "",
"type": "address"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"name": "theString",
"type": "string"
}
],
"name": "test2",
"outputs": [
{
"name": "",
"type": "string"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
}
]`));
var contract = CoursetroContract.at('0xd3744cac3a2f796c16b45e5be582c1c5f3039482'); //proxy
//var contract = CoursetroContract.at('0xd025c8835b2a4bd2f9eeb1d682db224f7b301868'); //erc20
contract.test(0xC357c241b98B15B3A08aeC3AcD49fBC0cbD74fcE,
function(err,result){
console.log("err" ,err);
console.log("result" , result);
}
);
}
Edit 2
These contract addresses are already deployed on the Ropsten testnet.
The proxy contract works a bit differently.
let result := delegatecall(gas, _impl, ptr, calldatasize, 0, 0)
The DELEGATECALL in your proxy contract calls the contract at the address specified in _impl. As a result, it runs the _impl code (in your case, the ERC-20) in the proxy contract's environment, so the proxy's storage is modified, not the ERC-20 contract's storage. See the documentation on how delegatecall works.
So my suggestion would be to look at how you are initializing your ERC20 contract and setting its balance.
You would have to do something like this:
erc20Contract = await erc20Contract.at(proxy.address)
erc20Contract.initialize()
The first line gives you the interface of erc20Contract at proxy contract's address.
And the second line would redo the work of the constructor at proxy contract's address and storage.
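For example, with Truffle the flow might look something like this (a sketch, assuming your ERC-20 artifact is called Token and that it has an initialize() function which sets the initial balances; adapt it to however your token actually assigns balances):
const Proxy = artifacts.require("Proxy"); // assumed artifact names
const Token = artifacts.require("Token");

const accounts = await web3.eth.getAccounts();
const proxy = await Proxy.deployed();
const token = await Token.deployed();

// Point the proxy at the ERC-20 implementation
await proxy.upgradeTo(token.address);

// Get the ERC-20 interface at the proxy's address, so every call goes through the proxy
const tokenAtProxy = await Token.at(proxy.address);

// Redo the constructor's work against the proxy's storage (assumed initialize() function)
await tokenAtProxy.initialize();

// balanceOf now reads the proxy's storage, which is what MetaMask sees when you add the proxy address
const balance = await tokenAtProxy.balanceOf(accounts[0]);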
I've got Android and web apps. The Android app uses Couchbase Lite, the web app uses Couchbase Server. I'm using Couchbase Sync Gateway to enable data replication between those two databases.
So far it works fine for sending data from mobile and receiving it both in the web app and on a second mobile device. I noticed that all sent documents have a "_sync" property added.
My question is: how can I enable documents added through the web app (to the Couchbase database) to take part in replication? (They don't have the "_sync" field by default.)
Edit
As Legendary_Hunter suggested I tried using bucket shadowing, but I still can't get it working. My config file:
{
"log":["CRUD+", "REST+", "Changes+", "Attach+"],
"databases": {
"kris_mobile_db": {
"server":"http://192.168.0.11:8091",
"sync":`
function (doc) {
channel (doc.channels);
}`,
"bucket":"kris_mobile_db",
"users": {
"GUEST": {
"disabled": false,
"admin_channels": ["*"]
}
},
"shadow": {
"server": "http://localhost:8091",
"bucket": "kris_mobile_db_sync"
}
}
}
}
Edit 2 (29.05.16)
public class DatabaseManager {
private static DatabaseManager manager;
private static CouchbaseEnvironment env = DefaultCouchbaseEnvironment.builder().autoreleaseAfter(6000).build();
private static String bucketName = "kris_mobile_db";
private Cluster cluster;
private Bucket bucket;
public static DatabaseManager getInstance(){
if(manager == null)
manager = new DatabaseManager();
return manager;
}
public Bucket getBucketInstance(){
if(bucket == null)
bucket = cluster.openBucket(bucketName);
return bucket;
}
public boolean establishConnection(String host, String port, String bucketName){
// host: 192.168.0.11, port: 8091
cluster = CouchbaseCluster.create(env, host+":"+port);
DatabaseManager.bucketName = bucketName;
bucket = cluster.openBucket(bucketName);
return true;
}
}
and inserting a document looks like this:
JsonDocument doc = JsonDocument.create(docId, content);
DatabaseManager.getInstance().getBucketInstance().insert(doc);
Edit 3
So finally I managed to get shadowing working, in case anyone has the same problem: my main Couchbase bucket is kris_mobile_db and the Sync Gateway bucket used for shadowing is kris_mobile_db_sync. Config file:
{
"log":["CRUD+", "REST+", "Changes+", "Attach+"],
"databases": {
"kris_mobile_db": {
"server":"http://192.168.0.11:8091",
"sync":`
function (doc) {
channel (doc.channels);
}`,
"bucket":"kris_mobile_db_sync",
"users": {
"GUEST": {
"disabled": false,
"admin_channels": ["*"]
}
},
"shadow":{
"server":"http://192.168.0.11:8091",
"bucket":"kris_mobile_db"
}
}
}
}
Just use bucket shadowing. It is bidirectional syncing of the Sync Gateway bucket with any bucket on the Couchbase server.
If you want to keep all the good things that the sync function gives you, then you have to go through the Sync Gateway. The Sync Gateway exposes a REST API that you can use to build your web app.
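For example, a web app can write documents through the Sync Gateway REST API instead of writing to the Couchbase bucket directly, so they get the _sync metadata and take part in replication. A minimal sketch in JavaScript, assuming the Sync Gateway's public REST port (4984 by default) is reachable on the same host as in the config above:
// Create a document through Sync Gateway so it is tracked for replication
fetch("http://192.168.0.11:4984/kris_mobile_db/", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    type: "example",
    name: "Document created by the web app",
    channels: ["*"] // routed by the sync function: channel(doc.channels)
  })
})
  .then(function (response) { return response.json(); })
  .then(function (result) { console.log(result.id, result.rev); });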
I have an Ember CLI project that runs on localhost:4200 and an ASP.NET Web API project that runs on localhost:56967. Both projects run fine separately: I can start up my Ember app and test several routes, which look fine, and I can visit my API (for example: api/products) and see the response.
The problem I'm having is hooking the two up with each other.
Adapter
export default DS.RESTAdapter.extend({
host: 'http://localhost:56967',
namespace: 'api'
});
I first ran into some CORS problems, but I fixed the contentSecurityPolicy in my Ember app and enabled CORS on my API.
When I go to the products route I can see that the request to the API gets accepted and the API replies with the JSON answer. However, I'm failing to serialize the model so I can use it in my Ember app.
This is the response from the API:
[{"ProductId":1,"Name":"Product 1","Description":"Category 1"},{"ProductId":2,"Name":"Product 2","Description":"Category 2"},{"ProductId":3,"Name":"Product 3","Description":"Category 3"}]
Ember model for a product
export default DS.Model.extend({
name : DS.attr('string'),
description: DS.attr('string')
});
Asp.net model for a product:
public class Product
{
public int ProductId { get; set; }
[Required]
public string Name { get; set; }
public string Description { get; set; }
}
I know I have to serialize the API response to make it "readable" JSON for my Ember app. The question now: is it better to change the formatting on the API, or to write a good serializer? How would I go about making the serializer? It's hard to find decent tutorials. I tried this, but it's not working:
export default DS.RESTSerializer.extend({
primaryKey: 'productId'
});
This is the error I'm getting:
Error while processing route: products No model was found for '0' Error: No model was found for '0'
EDIT
After trying the suggested serializer and some ASP.NET serializers as well, I still couldn't get it to work. Today I found this project: http://jsonapi.codeplex.com/. It's a NuGet package that helps the output of your ASP.NET API comply with the JSON API standard. I got this working with Ember Data in no time. Just add the correct header in your REST adapter so that it looks like this:
import DS from 'ember-data';
export default DS.RESTAdapter.extend({
host: 'http://localhost:57014',
namespace: 'api',
headers:{
"Accept": "application/vnd.api+json"
}
});
And in your ASP.NET model just add:
[JsonObject(Title="product")]
public class Product
{
public int Id { get; set; }
public string Name { get; set; }
public string Description { get; set; }
}
It will pluralize your output to this:
{
"products": [
{
"id": "1",
"name": "Product 1",
"description": "Category 1"
},
{
"id": "2",
"name": "Product 2",
"description": "Category 2"
},
{
"id": "3",
"name": "Product 3",
"description": "Category 3"
}
]
}
It's still in an alpha state but looks promising. A small note on the pluralization: it just adds an -s to your model name, something to keep in mind.
The main issue is that ASP.NET Web API returns the following response:
[
{
"ProductId":1,
"Name":"Product 1",
"Description":"Category 1"
}
]
But Ember Data expects the server to respond with the following format instead:
{
"products": [
{
"productId": 1,
"name": "Product 1",
"description": "Category 1"
}
]
}
You could update the response from the Web API server to be in the format that Ember expects, but it's easier to create a serializer in Ember to map the data from ASP.NET Web API into Ember's format.
I wrote a detailed blog post that explains how to create an Ember Serializer to perform this mapping.
Be sure to read the blog post to understand what is going on in the Serializer. But as a reference, here is what I believe your serializer should look like:
App.ProductSerializer = DS.RESTSerializer.extend({
  primaryKey: 'productId',

  extract: function(store, primaryType, payload, id, requestType) {
    if (payload.length) {
      for (var i = 0; i < payload.length; i++) {
        this.mapRecord(payload[i]);
      }
    } else {
      this.mapRecord(payload);
    }
    var payloadWithRoot = {};
    payloadWithRoot[primaryType.typeKey] = payload;
    return this._super(store, primaryType, payloadWithRoot, id, requestType);
  },

  // Rename each PascalCase property coming from the API to camelCase for Ember Data
  mapRecord: function(record) {
    for (var property in record) {
      var camelized = property.camelize();
      if (camelized !== property) {
        record[camelized] = record[property];
        delete record[property];
      }
    }
    return record;
  },

  // Convert camelCase attributes back to PascalCase when sending data to the API
  serializeIntoHash: function(hash, type, record, options) {
    var recordJSON = record.toJSON();
    for (var property in recordJSON) {
      hash[property.capitalize()] = recordJSON[property];
    }
  }
});
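Since the question uses Ember CLI, the same serializer would live in app/serializers/product.js and be exported as a module instead of being attached to a global App object. A sketch of that wrapper (the hooks themselves are the ones shown above):
// app/serializers/product.js (Ember CLI layout)
import DS from 'ember-data';

export default DS.RESTSerializer.extend({
  primaryKey: 'productId'
  // plus the extract, mapRecord and serializeIntoHash hooks shown above
});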