Python discord bot command with a JSON database

So I'm making a command that is !command.
When you type that in chat, it will update the member's score in the database.
The database will look something like this
{
    "person": "epikUbuntu",
    "score": "22"
}
How would I go about doing that?
Edit:
If I wasn't clear, I meant how would I go about doing the Python part of it?

JSON objects in Python work like dictionaries.
You can write a new dictionary and save it:
import json

data = {"foo": "bar"}
with open("file.json", "w+") as fp:
    json.dump(data, fp, sort_keys=True, indent=4)  # kwargs for beautification
And you can load data from a file (this will be for updating the file):
with open("file.json", "r") as fp:
data = json.load(fp) # loading json contents into data variable - this will be a dict
data["foo"] = "baz" # updating values
data["bar"] = "foo" # writing new values
with open("file.json", "w+") as fp:
json.dump(data, fp, sort_keys=True, indent=4)
Discord.py example:
import os  # for checking the file exists
import json

def add_score(member: discord.Member, amount: int):
    if os.path.isfile("file.json"):
        with open("file.json", "r") as fp:
            data = json.load(fp)
        try:
            data[f"{member.id}"]["score"] += amount
        except KeyError:  # if the user isn't in the file, do the following
            data[f"{member.id}"] = {"score": amount}  # add other things you want to store
    else:
        data = {f"{member.id}": {"score": amount}}
    # saving the file outside of the if statements saves us having to write it twice
    with open("file.json", "w+") as fp:
        json.dump(data, fp, sort_keys=True, indent=4)  # kwargs for beautification
    # you can also return the new/updated score here if you want

def get_score(member: discord.Member):
    with open("file.json", "r") as fp:
        data = json.load(fp)
    return data[f"{member.id}"]["score"]

@bot.command()
async def cmd(ctx):
    # some code here
    add_score(ctx.author, 10)
    # 10 is just an example
    # you can use the random module if you want - random.randint(x, y)
    await ctx.send(f"You now have {get_score(ctx.author)} score!")
References:
Context managers
Dictionaries
Loading data from a json
Writing to a json

Related

How to convert XML to JSON from one directory to another directory

I have a scenario where one folder has 100 XML files, and I need to convert them to JSON files with the same names as the XMLs. I have tried many ways, but all the XMLs are converted into one JSON file, and I need separate JSON files.
With the code below, all 100 XML files are converted into one JSON file, but I need 100 separate JSON files. Can anyone please suggest the best approach?
for path, dirs, files in os.walk(r"C:\exml\FND_GLS".format(type)):
    for f in files:
        clinical = os.path.join(path, f)
        print(clinical)
        tree = ET.parse(clinical)
        root = tree.getroot()
        xmlstr = ET.tostring(root, encoding='utf-8', method='xml')
        data_dict = dict(xmltodict.parse(xmlstr))

def write_json(target_path, target_file, data):
    if not os.path.exists(target_path):
        try:
            os.makedirs(target_path)
        except Exception as e:
            print(e)
            raise
    with open(os.path.join(target_path, target_file), 'w') as f:
        data = json.dump(data_dict, f, sort_keys=False, indent=4)

write_json('C:\ejson\FND_GLS_Json', 'my_json.json', 'data')
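One way to get a separate JSON file per XML is to do the conversion and the write inside the loop, deriving each output filename from the XML filename. A minimal sketch (untested, assuming the same xmltodict/json modules and the folder paths from the question):
import os
import json
import xmltodict

src_dir = r"C:\exml\FND_GLS"          # source folder from the question
dst_dir = r"C:\ejson\FND_GLS_Json"    # destination folder from the question
os.makedirs(dst_dir, exist_ok=True)

for path, dirs, files in os.walk(src_dir):
    for name in files:
        if not name.lower().endswith(".xml"):
            continue
        xml_path = os.path.join(path, name)
        with open(xml_path, "rb") as xml_file:
            data_dict = xmltodict.parse(xml_file.read())
        # same base name as the XML, but with a .json extension
        json_name = os.path.splitext(name)[0] + ".json"
        with open(os.path.join(dst_dir, json_name), "w") as json_file:
            json.dump(data_dict, json_file, indent=4)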

Trying to disable/enable commands locally using json file

I'm trying to make two commands, one to enable commands and one to disable commands per guild, but I'm having problems as I don't have much experience with JSON files. I'm trying to make it so that when I disable a command, it checks whether the guild id is there and whether the command has already been added, plus a command to enable the command again, which checks for the guild id and the command name.
here is the code:
@client.command()
async def disablecommand(ctx, commandname):
    command = client.get_command(commandname)
    with open("Disabled_commands.json") as f:
        configData = json.load(f)
    disabledcommands = configData[f"{ctx.guild.id}"]
    if str(ctx.guild.id) in configData:  # check if the guild id is in list
        disabledcommands.append(command)
        if command in disabledcommands:  # check if command is already disabled
            await ctx.reply("command already disabled")
        else:  # if command is not disabled
            disabledcommands.append(command)
            with open(f"{ctx.guild.id}", "r+") as f:
                data = json.load(f)
                data["Disabled_commands"] = disabledcommands
                f.seek(0)
                f.write(json.dumps(data))
                f.truncate()
            await ctx.send("command has been disabled")
    else:  # if guild id is not in json file
        disabledcommands.append(command)
        with open(f"{ctx.guild.id}", "r+") as f:
            data = json.load(f)
            data[f"{ctx.guild.id}"] = {}
            data["Disabled_commands"] = disabledcommands
            f.seek(0)
            f.write(json.dumps(data))
            f.truncate()
        await ctx.send("command has been disabled")

@client.command()
async def enablecommand(ctx, commandname):
    command = client.get_command(commandname)
    with open("Disabled_commands.json") as f:
        configData = json.load(f)
    disabledcommands = configData[f"{ctx.guild.id}"]
    if str(ctx.guild.id) in configData:  # check if server is in the list
        if command in disabledcommands:  # check if command is disabled
            disabledcommands.remove(command)
            with open("Disabled_commands.json", "r+") as f:
                data = json.load(f)
                data[f"{ctx.guild.id}"] = disabledcommands
                f.seek(0)
                f.write(json.dumps(data))
                f.truncate()
            await ctx.send("Command is now enabled")
        else:  # if command is enabled already
            await ctx.send("Command isnt disabled")
    else:  # if server isnt in the list
        await ctx.send("Command isnt disabled")
my json should look like this:
{"816726041673990214":{"ping","mute","ban"},"786631236838883400":{"troll","thing","ping"}}
The problem is that your JSON is invalid, so the JSON decoder raises an error. You use {} around "ping","mute","ban", but that is a list, so it needs [] around it. Your fixed JSON should look like this:
{"816726041673990214":["ping","mute","ban"],"786631236838883400":["troll","thing","ping"]}

Is it possible to edit a list in a json file? [duplicate]

Hi, I am trying to take the data from a JSON file, insert an id, and then perform a REST POST.
my file data.json has:
{
    'name': 'myname'
}
and I would like to add an id so that the json data looks like:
{
    'id': 134,
    'name': 'myname'
}
So I tried:
import json
f = open("data.json","r")
data = f.read()
jsonObj = json.loads(data)
I can't get it to load the JSON file.
What should I do so that I can convert the JSON file into a JSON object and add another id value?
Set item using data['id'] = ....
import json

with open('data.json', 'r+') as f:
    data = json.load(f)
    data['id'] = 134  # <--- add `id` value.
    f.seek(0)         # <--- should reset file position to the beginning.
    json.dump(data, f, indent=4)
    f.truncate()      # remove remaining part
falsetru's solution is nice, but has a little bug:
Suppose the original 'id' value was longer than 5 characters. When we then dump the new 'id' (134, only 3 characters), the string written from position 0 in the file is shorter than the original, and extra characters (such as '}') from the original content are left in the file.
I solved that by replacing the original file.
import json
import os

filename = 'data.json'
with open(filename, 'r') as f:
    data = json.load(f)
    data['id'] = 134  # <--- add `id` value.

os.remove(filename)
with open(filename, 'w') as f:
    json.dump(data, f, indent=4)
I would like to present a modified version of Vadim's solution. It helps when dealing with asynchronous requests that write/modify the JSON file. I know it wasn't part of the original question, but it might be helpful for others.
In the case of asynchronous file modification, os.remove(filename) will raise FileNotFoundError if requests come in frequently. To overcome this, you can create a temporary file with the modified content and then rename it, atomically replacing the old version. This solution works fine for both synchronous and asynchronous cases.
import os, json, uuid

filename = 'data.json'
with open(filename, 'r') as f:
    data = json.load(f)
    data['id'] = 134  # <--- add `id` value.
    # add, remove, modify content

# create a randomly named temporary file to avoid
# interference with other threads/asynchronous requests
tempfile = os.path.join(os.path.dirname(filename), str(uuid.uuid4()))
with open(tempfile, 'w') as f:
    json.dump(data, f, indent=4)

# rename the temporary file, replacing the old file
os.rename(tempfile, filename)
There are really quite a number of ways to do this, and all of the above are in one way or another valid approaches. Let me add a straightforward proposition. So, assuming your currently existing JSON file looks like this:
{
    "name": "myname"
}
And you want to bring in this new json content (adding key "id")
{
    "id": "134",
    "name": "myname"
}
My approach has always been to keep the code extremely readable with easily traceable logic. So first, we read the entire existing json file into memory, assuming you are very well aware of your json's existing key(s).
import json

# first, get the absolute path to json file
PATH_TO_JSON = 'data.json'  # assuming same directory (but you can work your magic here with os.)

# read existing json to memory. you do this to preserve whatever existing data.
with open(PATH_TO_JSON, 'r') as jsonfile:
    json_content = json.load(jsonfile)  # this is now in memory! you can use it outside 'open'
Next, we use the 'with open()' syntax again, this time with the 'w' option. 'w' is a write mode which lets us edit and write new information to the file. Here's the catch that works for us: any existing JSON with the same target filename will be erased automatically.
So what we can do now is simply write to the same filename with the new data:
# add the id key-value pair (rmbr that it already has the "name" key value)
json_content["id"] = "134"

with open(PATH_TO_JSON, 'w') as jsonfile:
    json.dump(json_content, jsonfile, indent=4)  # you decide the indentation level
And there you go!
data.json should be good to go for a good old POST request.
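If it helps, a hypothetical follow-up POST of that file with the requests library could look like this (the endpoint URL is made up for illustration, not taken from the question):
import json
import requests

with open("data.json") as f:
    payload = json.load(f)

# placeholder endpoint - substitute your real one
response = requests.post("https://example.com/api/items", json=payload)
print(response.status_code)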
try this script:
with open("data.json") as f:
data = json.load(f)
data["id"] = 134
json.dump(data, open("data.json", "w"), indent = 4)
the result is:
{
    "name": "myname",
    "id": 134
}
Just the arrangement is different. You can solve the problem by converting the data to a list, arranging it as you wish, then converting it back to a dict and saving the file, like this:
import json

index_add = 0
with open("data.json") as f:
    data = json.load(f)

data_li = [[k, v] for k, v in data.items()]
data_li.insert(index_add, ["id", 134])
data = {data_li[i][0]: data_li[i][1] for i in range(0, len(data_li))}
json.dump(data, open("data.json", "w"), indent=4)
the result is:
{
    "id": 134,
    "name": "myname"
}
You can add an if condition so that the key is not repeated but just changed, like this:
index_add = 0
n_k = "id"
n_v = 134

with open("data.json") as f:
    data = json.load(f)

if n_k in data:
    data[n_k] = n_v
else:
    data_li = [[k, v] for k, v in data.items()]
    data_li.insert(index_add, [n_k, n_v])
    data = {data_li[i][0]: data_li[i][1] for i in range(0, len(data_li))}

json.dump(data, open("data.json", "w"), indent=4)
This implementation should suffice:
with open(jsonfile, 'r') as file:
    data = json.load(file)

data[id] = value

with open(jsonfile, 'w') as file:
    json.dump(data, file)
A context manager is used for opening the jsonfile.
data holds the updated object, which is then dumped into the overwritten jsonfile in 'w' mode.
Not exactly your solution, but it might help some people solving this issue with keys.
I have a list of files in a folder, and I need to make JSON out of it with keys.
After many hours of trying, the solution is simple.
Solution:
import os

async def return_file_names():
    dir_list = os.listdir("./tmp/")
    json_dict = {"responseObj": [{"Key": dir_list.index(value), "Value": value} for value in dir_list]}
    print(json_dict)
    return json_dict
The response looks like this:
{
    "responseObj": [
        {
            "Key": 0,
            "Value": "bottom_mask.GBS"
        },
        {
            "Key": 1,
            "Value": "bottom_copper.GBL"
        },
        {
            "Key": 2,
            "Value": "copper.GTL"
        },
        {
            "Key": 3,
            "Value": "soldermask.GTS"
        },
        {
            "Key": 4,
            "Value": "ncdrill.DRD"
        },
        {
            "Key": 5,
            "Value": "silkscreen.GTO"
        }
    ]
}

How to use mock_open with json.load()?

I'm trying to get a unit test working that validates a function that reads credentials from a JSON-encoded file. Since the credentials themselves aren't fixed, the unit test needs to provide some and then test that they are correctly retrieved.
Here is the credentials function:
def read_credentials():
    basedir = os.path.dirname(__file__)
    with open(os.path.join(basedir, "authentication.json")) as f:
        data = json.load(f)
    return data["bot_name"], data["bot_password"]
and here is the test:
def test_credentials(self):
    with patch("builtins.open", mock_open(
        read_data='{"bot_name": "name", "bot_password": "password"}\n'
    )):
        name, password = shared.read_credentials()
        self.assertEqual(name, "name")
        self.assertEqual(password, "password")
However, when I run the test, the json code blows up with a decode error. Looking at the json code itself, I'm struggling to see why the mock test is failing because json.load(f) simply calls f.read() then calls json.loads().
Indeed, if I change my authentication function to the following, the unit test works:
def read_credentials():
    # Read the authentication file from the current directory and create a
    # HTTPBasicAuth object that can then be used for future calls.
    basedir = os.path.dirname(__file__)
    with open(os.path.join(basedir, "authentication.json")) as f:
        content = f.read()
    data = json.loads(content)
    return data["bot_name"], data["bot_password"]
I don't necessarily mind leaving my code in this form, but I'd like to understand if I've got something wrong in my test that would allow me to keep my function in its original form.
Stack trace:
Traceback (most recent call last):
  File "test_shared.py", line 56, in test_credentials
    shared.read_credentials()
  File "shared.py", line 60, in read_credentials
    data = json.loads(content)
  File "/home/philip/.local/share/virtualenvs/atlassian-webhook-basic-3gOncDp4/lib/python3.6/site-packages/flask/json/__init__.py", line 205, in loads
    return _json.loads(s, **kwargs)
  File "/usr/lib/python3.6/json/__init__.py", line 367, in loads
    return cls(**kw).decode(s)
  File "/usr/lib/python3.6/json/decoder.py", line 339, in decode
    obj, end = self.raw_decode(s, idx=_w(s, 0).end())
  File "/usr/lib/python3.6/json/decoder.py", line 357, in raw_decode
    raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
I had the same issue and got around it by mocking json.load and builtins.open:
import json
from unittest.mock import patch, MagicMock

# I don't care about the actual open
p1 = patch("builtins.open", MagicMock())
m = MagicMock(side_effect=[{"foo": "bar"}])
p2 = patch("json.load", m)

with p1 as p_open:
    with p2 as p_json_load:
        f = open("filename")
        print(json.load(f))
Result:
{'foo': 'bar'}
I had the exact same issue and solved it. Full code below, first the function to test, then the test itself.
The original function I want to test loads a json file that is structured like a dictionary, and checks to see if there's a specific key-value pair in it:
def check_if_file_has_real_data(filepath):
    with open(filepath, "r") as f:
        data = json.load(f)
    if "fake" in data["the_data"]:
        return False
    else:
        return True
But I want to test this without loading any actual file, exactly as you describe. Here's how I solved it:
from my_module import check_if_file_has_real_data
import mock

@mock.patch("my_module.json.load")
@mock.patch("my_module.open")
def test_check_if_file_has_real_data(mock_open, mock_json_load):
    mock_json_load.return_value = dict({"the_data": "This is fake data"})
    assert check_if_file_has_real_data("filepath") == False

    mock_json_load.return_value = dict({"the_data": "This is real data"})
    assert check_if_file_has_real_data("filepath") == True
The mock_open object isn't called explicitly in the test function, but if you don't include that decorator and argument you get a filepath error when the with open part of the check_if_file_has_real_data function tries to run using the actual open function rather than the MagicMock object that's been passed into it.
Then you overwrite the response provided by the json.load mock with whatever you want to test.

Scraping Data from JSON

How do I scrape this data,
http://jsonviewer.stack.hu/#http://91.134.133.185:5000/viaroute?loc=25.299919,55.376774&loc=25.298738,55.369181
and extract only "total_time" to a file?
It should be fairly easy to achieve this with a little searching.
You just have to find some modules to work with JSON, dataframes and text files, and learn how to use them.
Steps:
1 - read the JSON data using pandas.read_json()
2 - set data = df['total_time']
3 - write data using pandas.to_csv()
Simple as py.
Documentation:
http://pandas.pydata.org/pandas-docs/stable/generated/pandas.read_json.html
http://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.to_csv.html
import json
json_string = '''Json data here'''
data = json.loads(json_string)
total_time = data["route_summary"]["total_time"]
f = open("file_name_here.txt", "w+")
f.write(str(total_time))
f.close()
I've written this program for you:
import json, urllib2

url = 'http://91.134.133.185:5000/viaroute?loc=25.299919,55.376774&loc=25.298738,55.369181'
response = urllib2.urlopen(url)
data = json.load(response)
tot_time = str(data['route_summary']['total_time'])
s = tot_time + "\n"

outfile = "C:\\Users\\USER\\Desktop\\outfile.txt"
with open(outfile, "a+") as f:
    f.write(s)
It'll append each observation to the end of outfile.txt
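If you're on Python 3, where urllib2 no longer exists, a roughly equivalent sketch using urllib.request would be:
import json
import urllib.request

url = 'http://91.134.133.185:5000/viaroute?loc=25.299919,55.376774&loc=25.298738,55.369181'
with urllib.request.urlopen(url) as response:
    data = json.load(response)  # json.load accepts the bytes response on Python 3.6+

total_time = data['route_summary']['total_time']
with open("outfile.txt", "a+") as f:  # relative path used here instead of the Desktop path
    f.write(str(total_time) + "\n")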
Saving json data to a file and reading that file
import json, urllib2

url = 'http://91.134.133.185:5000/viaroute?loc=25.299919,55.376774&loc=25.298738,55.369181'
response = urllib2.urlopen(url)
data = json.load(response)
outfile = "C:\\Users\\USER\\Desktop\\outfile.txt"

# saving json to file
with open(outfile, "w") as f:
    f.write(str(data))

# reading file with json data
with open(outfile, 'r') as g:
    json_data = g.readline()

print json_data
#Output:
{u'route_geometry': u'{_ego#m}|rhBpBaBvHuC`EuArEUtEtAlDvEnD`MlDvMli#hsEfFzn#QlTgNhwCs#fKwBhF', u'status': 0, u'via_indices': [0, 15], u'route_summary': {u'total_time': 101, u'end_point': u'', u'start_point': u'', u'total_distance': 871}, u'route_name': [u'', u''], u'hint_data': {u'checksum': 326195011, u'locations': [u'AXQDAP____8AAAAABwAAABEAAAAYAAAAIwIAAERwAgAAAAAADgyCAef7TAMCAAEB', u'bOsDAP____8AAAAAAwAAAAcAAADFAQAAFAAAAEJwAgAAAAAANQeCAd3dTAMFAAEB']}, u'via_points': [[25.299982, 55.376873], [25.29874, 55.369179]], u'status_message': u'Found route between points', u'found_alternative': False}