Bound method displaying address in Python

class NonVerticalLine:
    def __init__(self, point_1, point_2):
        self.point_1 = point_1
        self.point_2 = point_2

    def slope(self):
        p1 = self.point_1
        p2 = self.point_2
        return (p2.y - p1.y) / (p2.x - p1.x)
When I pass the values to __init__ and access the slope function, it gives:
>>> from quiz_5 import *
>>> p1 = Point(1,2)
>>> p2 = Point(4,4)
>>> line = NonVerticalLine(point_1 = p1, point_2 = p2)
>>> line.slope
<bound method NonVerticalLine.slope of <quiz_5.NonVerticalLine object at 0x105b00160>>
It should be 0.6666666666666666
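(For reference, the Point class comes from quiz_5 and isn't shown in the question; a minimal stand-in, assumed here just so the snippet can be run, would be:)
class Point:
    # Hypothetical stand-in for quiz_5.Point: only x and y attributes are needed
    def __init__(self, x, y):
        self.x = x
        self.y = y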

In that case, try
class NonVerticalLine:
    def __init__(self, point_1, point_2):
        self.point_1 = point_1
        self.point_2 = point_2

    @property
    def slope(self):
        p1 = self.point_1
        p2 = self.point_2
        return (p2.y - p1.y) / (p2.x - p1.x)
The @property decorator is used to implement getters and setters in Python; with it, line.slope is equivalent to what line.slope() would be without the decorator. In this case it seems that this behaviour is what you want.
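With the decorator applied, the attribute access from the question evaluates the method (a quick sketch, assuming the same Point values as above):
>>> line = NonVerticalLine(point_1=Point(1, 2), point_2=Point(4, 4))
>>> line.slope
0.6666666666666666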

Problem with PettingZoo and Stable-Baselines3 with a ParallelEnv

I am having trouble making things work with a custom ParallelEnv I wrote using PettingZoo. I am using SuperSuit's ss.pettingzoo_env_to_vec_env_v1(env) as a wrapper to vectorize the environment and make it work with Stable-Baselines3, as documented here.
You can find attached a summary of the most relevant part of the code:
from typing import Optional
from gym import spaces
import random
import numpy as np
from pettingzoo import ParallelEnv
from pettingzoo.utils.conversions import parallel_wrapper_fn
import supersuit as ss
from gym.utils import EzPickle, seeding


def env(**kwargs):
    env_ = parallel_env(**kwargs)
    env_ = ss.pettingzoo_env_to_vec_env_v1(env_)
    # env_ = ss.concat_vec_envs_v1(env_, 1)
    return env_


petting_zoo = env


class parallel_env(ParallelEnv, EzPickle):
    metadata = {'render_modes': ['ansi'], "name": "PlayerEnv-Multi-v0"}

    def __init__(self, n_agents: int = 20, new_step_api: bool = True) -> None:
        EzPickle.__init__(
            self,
            n_agents,
            new_step_api
        )
        self._episode_ended = False
        self.n_agents = n_agents
        self.possible_agents = [
            f"player_{idx}" for idx in range(n_agents)]
        self.agents = self.possible_agents[:]
        self.agent_name_mapping = dict(
            zip(self.possible_agents, list(range(len(self.possible_agents))))
        )
        self.observation_spaces = spaces.Dict(
            {agent: spaces.Box(shape=(len(self.agents),),
                               dtype=np.float64, low=0.0, high=1.0) for agent in self.possible_agents}
        )
        self.action_spaces = spaces.Dict(
            {agent: spaces.Discrete(4) for agent in self.possible_agents}
        )
        self.current_step = 0

    def seed(self, seed=None):
        self.np_random, seed = seeding.np_random(seed)

    def observation_space(self, agent):
        return self.observation_spaces[agent]

    def action_space(self, agent):
        return self.action_spaces[agent]

    def __calculate_observation(self, agent_id: int) -> np.ndarray:
        return self.observation_space(agent_id).sample()

    def __calculate_observations(self) -> np.ndarray:
        observations = {
            agent: self.__calculate_observation(
                agent_id=agent)
            for agent in self.agents
        }
        return observations

    def observe(self, agent):
        return self.__calculate_observation(agent_id=agent)

    def step(self, actions):
        if self._episode_ended:
            return self.reset()
        observations = self.__calculate_observations()
        rewards = random.sample(range(100), self.n_agents)
        self.current_step += 1
        self._episode_ended = self.current_step >= 100
        infos = {agent: {} for agent in self.agents}
        dones = {agent: self._episode_ended for agent in self.agents}
        rewards = {
            self.agents[i]: rewards[i]
            for i in range(len(self.agents))
        }
        if self._episode_ended:
            self.agents = {}  # To satisfy `set(par_env.agents) == live_agents`
        return observations, rewards, dones, infos

    def reset(self,
              seed: Optional[int] = None,
              return_info: bool = False,
              options: Optional[dict] = None,):
        self.agents = self.possible_agents[:]
        self._episode_ended = False
        self.current_step = 0
        observations = self.__calculate_observations()
        return observations

    def render(self, mode="human"):
        # TODO: IMPLEMENT
        print("TO BE IMPLEMENTED")

    def close(self):
        pass
Unfortunately when I try to test with the following main procedure:
from stable_baselines3 import DQN, PPO
from stable_baselines3.common.env_checker import check_env
from dummy_env import dummy
from pettingzoo.test import parallel_api_test

if __name__ == '__main__':
    # Testing the parallel algorithm alone
    env_parallel = dummy.parallel_env()
    parallel_api_test(env_parallel)  # This works!

    # Testing the environment with the wrapper
    env = dummy.petting_zoo()

    # ERROR: AssertionError: The observation returned by the `reset()` method does not match the given observation space
    check_env(env)

    # Model initialization
    model = PPO("MlpPolicy", env, verbose=1)

    # ERROR: ValueError: could not broadcast input array from shape (20,20) into shape (20,)
    model.learn(total_timesteps=10_000)
I get the following error:
AssertionError: The observation returned by the `reset()` method does not match the given observation space
If I skip check_env() I get the following one:
ValueError: could not broadcast input array from shape (20,20) into shape (20,)
It seems that ss.pettingzoo_env_to_vec_env_v1(env) is capable of splitting the parallel environment into multiple vectorized ones, but not for the reset() function.
Does anyone know how to fix this problem?
Please find the GitHub repository to reproduce the problem.
You should double-check the reset() function in PettingZoo. It will return None instead of returning an observation the way Gym does.
Thanks to a discussion I had in the issue section of the SuperSuit repository, I am able to post the solution to the problem. Thanks to jjshoots!
First of all, it is necessary to have the latest SuperSuit version. In order to get that, I needed to install Stable-Baselines3 using the instructions here to make it work with gym 0.24+.
After that, taking the code in the question as example, it is necessary to substitute
def env(**kwargs):
    env_ = parallel_env(**kwargs)
    env_ = ss.pettingzoo_env_to_vec_env_v1(env_)
    # env_ = ss.concat_vec_envs_v1(env_, 1)
    return env_
with
def env(**kwargs):
    env_ = parallel_env(**kwargs)
    env_ = ss.pettingzoo_env_to_vec_env_v1(env_)
    env_ = ss.concat_vec_envs_v1(env_, 1, base_class="stable_baselines3")
    return env_
The outcomes are:
Outcome 1: leaving in the line with check_env(env), I got the error AssertionError: Your environment must inherit from the gym.Env class cf https://github.com/openai/gym/blob/master/gym/core.py
Outcome 2: removing the line with check_env(env), the agent starts training successfully!
In the end, I think that the argument base_class="stable_baselines3" made the difference.
Only the small problem with check_env remains to be reported, but I think it can be considered trivial if the training works.
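For reference, a minimal training sketch with the fixed factory (assuming the same dummy module layout as in the question's main procedure):
from stable_baselines3 import PPO
from dummy_env import dummy

# Vectorized, SB3-compatible environment built by the corrected env() factory
env = dummy.petting_zoo()

model = PPO("MlpPolicy", env, verbose=1)
model.learn(total_timesteps=10_000)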

Keras Functional API and loss function with multiple inputs

I am trying to use a custom Keras loss function that, apart from the usual signature (y_true, y_pred), takes another parameter, sigma (which is also produced by the last layer of the network).
The training works fine, but then I am not sure how to perform forward propagation and return sigma (while mu is the output of the model.predict method).
This is the code I am using, which features a custom layer GaussianLayer that returns the list [mu, sigma].
import tensorflow as tf
from keras import backend as K
from keras.layers import Input, Dense, Layer, Dropout
from keras.models import Model
from keras.initializers import glorot_normal
import numpy as np


def custom_loss(sigma):
    def gaussian_loss(y_true, y_pred):
        return tf.reduce_mean(0.5*tf.log(sigma) + 0.5*tf.div(tf.square(y_true - y_pred), sigma)) + 10
    return gaussian_loss


class GaussianLayer(Layer):
    def __init__(self, output_dim, **kwargs):
        self.output_dim = output_dim
        super(GaussianLayer, self).__init__(**kwargs)

    def build(self, input_shape):
        self.kernel_1 = self.add_weight(name='kernel_1',
                                        shape=(30, self.output_dim),
                                        initializer=glorot_normal(),
                                        trainable=True)
        self.kernel_2 = self.add_weight(name='kernel_2',
                                        shape=(30, self.output_dim),
                                        initializer=glorot_normal(),
                                        trainable=True)
        self.bias_1 = self.add_weight(name='bias_1',
                                      shape=(self.output_dim, ),
                                      initializer=glorot_normal(),
                                      trainable=True)
        self.bias_2 = self.add_weight(name='bias_2',
                                      shape=(self.output_dim, ),
                                      initializer=glorot_normal(),
                                      trainable=True)
        super(GaussianLayer, self).build(input_shape)

    def call(self, x):
        output_mu = K.dot(x, self.kernel_1) + self.bias_1
        output_sig = K.dot(x, self.kernel_2) + self.bias_2
        output_sig_pos = K.log(1 + K.exp(output_sig)) + 1e-06
        return [output_mu, output_sig_pos]

    def compute_output_shape(self, input_shape):
        return [(input_shape[0], self.output_dim), (input_shape[0], self.output_dim)]


# This returns a tensor
inputs = Input(shape=(1,))
x = Dense(30, activation='relu')(inputs)
x = Dropout(0.3)(x)
x = Dense(30, activation='relu')(x)
x = Dense(40, activation='relu')(x)
x = Dropout(0.3)(x)
x = Dense(30, activation='relu')(x)
mu, sigma = GaussianLayer(1)(x)
model = Model(inputs, mu)
model.compile(loss=custom_loss(sigma), optimizer='adam')
model.fit(train_x, train_y, epochs=150)
Since your model returns two tensors as output, you also need to pass a list of two arrays as the targets when calling the fit() method. That's essentially what the error is trying to convey:
Error when checking model target: the list of Numpy arrays that you are passing to your model is not the size the model expected. Expected to see 2 array(s), but instead got the following list of 1 arrays:
So the error is in the targets (i.e. labels).
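A minimal sketch of what that would look like, under the assumption that both tensors are exposed as model outputs (reusing train_y for the sigma head is purely illustrative, not the asker's intent):
# Expose both heads as outputs so fit() can be given two matching target arrays
model = Model(inputs, [mu, sigma])
model.compile(loss=custom_loss(sigma), optimizer='adam')
model.fit(train_x, [train_y, train_y], epochs=150)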
I may have found the answer among Keras FAQs.
I found out that it is possible to retrieve intermediate steps' output using the code snippet below:
layer_name = 'main_output'
intermediate_layer_model = Model(inputs=model.input,
                                 outputs=model.get_layer(layer_name).output)
intermediate_output = intermediate_layer_model.predict(train_x[0])
intermediate_output
In this case intermediate_output is a list of two values, [mu, sigma] (I just needed to name the output layer main_output and retrieve it later).
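For completeness, a sketch of how the layer gets its name so that the lookup works; the name='main_output' argument and test_x are assumptions here, not code from the question:
# Name the custom layer so it can be retrieved with model.get_layer() later
mu, sigma = GaussianLayer(1, name='main_output')(x)
model = Model(inputs, mu)

# After training, both mu and sigma can be recovered through the intermediate model
intermediate_layer_model = Model(inputs=model.input,
                                 outputs=model.get_layer('main_output').output)
mu_pred, sigma_pred = intermediate_layer_model.predict(test_x)  # test_x: placeholder input data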

Cython class with attributes that are Python object instances

Let's have an example.
cdef class Example:
    attr2 = None
    cdef attr3
    cdef object attr4

    def __init__(self):
        self.attr1 = Obj()
        self.attr2 = Obj()
        self.attr3 = Obj()
        self.attr4 = Obj()
The line with the assignment to self.attr1 will raise an AttributeError saying: "object has no attribute 'attr1'".
If I try with self.attr2, it also raises an exception, but with the message: "object attribute 'attr2' is read-only".
And if I use the keyword cdef and it doesn't have an explicit type, the compilation process will fail.
If this attribute is defined with the type object, it looks fine. But different instances of the Example class will share this attr4 like a singleton, and any interaction with one of them will be visible to the other instances; in this case, I want each instance to have its own unique Obj().
You can try something like this:
class Obj(object):
    # At first I define a class Obj() to match your example
    def __init__(self):
        self.abc = 32
        self.xyz = "xyzxyz"


# Then I can define the Example class
cdef class Example:
    cdef public:
        object attr1, attr2, attr3, attr4

    def __init__(self):
        self.attr1 = Obj()
        self.attr2 = Obj()
        self.attr3 = Obj()
        self.attr4 = Obj()
It seems to be cythonized/compiled without error, and you have access to the attributes related to the Obj object:
In [11]: a = Example()
    ...: a.attr1.abc
Out[11]: 32

can't pickle instancemethod objects

I ran into a problem with pickle. The code is:
import cPickle


class A(object):
    def __init__(self):
        self.a = 1

    def methoda(self):
        print(self.a)


class B(object):
    def __init__(self):
        self.b = 2
        a = A()
        self.b_a = a.methoda

    def methodb(self):
        print(self.b)


if __name__ == '__main__':
    b = B()
    with open('best_model1.pkl', 'w') as f:
        cPickle.dump(b, f)
The error is:
File "/usr/lib/python2.7/copy_reg.py", line 70, in _reduce_ex
    raise TypeError, "can't pickle %s objects" % base.__name__
TypeError: can't pickle instancemethod objects
You can if you use dill instead of cPickle.
>>> import dill
>>>
>>> class A(object):
...   def __init__(self):
...     self.a = 1
...   def methods(self):
...     print(self.a)
...
>>>
>>> class B(object):
...   def __init__(self):
...     self.b = 2
...     a = A()
...     self.b_a = a.methods
...   def methodb(self):
...     print(self.b)
...
>>> b = B()
>>> b_ = dill.dumps(b)
>>> _b = dill.loads(b_)
>>> _b.methodb()
2
>>>
Also see:
Can't pickle <type 'instancemethod'> when using python's multiprocessing Pool.map()
Also, when dill is installed, pickle will work, but (as usual) cPickle will not.
import cPickle, pickle


class A(object):
    def __init__(self):
        self.a = 1

    def methoda(self):
        print(self.a)


class B(object):
    def __init__(self):
        self.b = 2
        a = A()
        self.b_a = a.methoda

    def methodb(self):
        print(self.b)


# build the instance to pickle
b = B()

# try using cPickle
try:
    c = cPickle.dumps(b)
    d = cPickle.loads(c)
except Exception as err:
    print('Unable to use cPickle (%s)' % err)
else:
    print('Using cPickle was successful')
    print(b)
    print(d)

# try using pickle
try:
    c = pickle.dumps(b)
    d = pickle.loads(c)
except Exception as err:
    print('Unable to use pickle (%s)' % err)
else:
    print('Using pickle was successful')
    print(b)
    print(d)
>>> Unable to use cPickle (can't pickle instancemethod objects)
>>> Using pickle was successful
>>> <__main__.B object at 0x10e9b84d0>
>>> <__main__.B object at 0x13df07190>
For whatever reason, cPickle is not simply a C version of pickle that is 100 times faster; there are some differences between the two.
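If pulling in dill is not an option, one common workaround (a sketch of an alternative approach, not part of the answers above) is to avoid storing the bound method on the instance and instead store the object itself, looking the method up when it is needed:
import cPickle


class A(object):
    def __init__(self):
        self.a = 1

    def methoda(self):
        print(self.a)


class B(object):
    def __init__(self):
        self.b = 2
        self.a_obj = A()  # store the instance, not the bound method

    def b_a(self):
        # look the method up at call time instead of pickling it
        return self.a_obj.methoda()


b = B()
with open('best_model1.pkl', 'wb') as f:
    cPickle.dump(b, f)  # no instancemethod is stored, so this pickles fine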

Python 3: accessing a variable's result from another class

I have a little problem with a variable update.
I have my variable declared in my first function like this: self.TestVar = 0.
Then, if a certain count == 2, I set self.TestVar = 2.
In a second function (in the same class), which is called from within another class, I want to return self.TestVar. No way:
AttributeError: 'ThndClass' object has no attribute 'TestVar'
I am most certainly not doing this the right way; all I want is to access self.TestVar = 2 from my other class, but I can't find a proper way to do so in Python.
It looks like my issue is that I set self.TestVar = 2 inside an "if" statement, which makes it live in another scope (or I might be wrong).
import sys
from PIL import Image
from PyQt4 import QtCore, QtGui


class MainWindow(QtGui.QWidget):
    def __init__(self):
        super(MainWindow, self).__init__()
        self.initUI()

    def initUI(self):
        self.TestVar = 0
        self.TheCount = 2
        if self.TheCount == 2:
            self.TestVar = 2
        ThndClass()

    def Getit(self):
        print("called correctly")
        print(self.TestVar)
        return self.TestVar


def main():
    app = QtGui.QApplication([])
    mw = MainWindow()
    sys.exit(app.exec_())


class ThndClass(QtGui.QWidget):
    def __init__(self):
        super(ThndClass, self).__init__()
        self.initUI2()

    def initUI2(self):
        print("Class Called")
        print(MainWindow.Getit(self))


if __name__ == '__main__':
    main()
If I remove the second class call:
import sys
from PIL import Image
from PyQt4 import QtCore, QtGui


class MainWindow(QtGui.QWidget):
    def __init__(self):
        super(MainWindow, self).__init__()
        self.initUI()

    def initUI(self):
        self.TestVar = 0
        self.TheCount = 2
        if self.TheCount == 2:
            self.TestVar = 2
        self.Getit()

    def Getit(self):
        print("called correctly")
        print(self.TestVar)
        return self.TestVar


def main():
    app = QtGui.QApplication([])
    mw = MainWindow()
    sys.exit(app.exec_())


if __name__ == '__main__':
    main()
This works correctly, but I want to be able to call Getit() from another class and get my result, or simply find a way to directly access self.TestVar from my other class.
When you call
MainWindow.Getit(self)
in ThndClass.initUI2, you are treating MainWindow and ThndClass interchangeably, when they do not have the same attributes. Here is an actual minimal example:
class Parent():
    def __init__(self):
        pass


class Child1(Parent):
    def __init__(self):
        super().__init__()
        self.foo = "foo"

    def method(self):
        print(type(self))
        print(self.foo)


class Child2(Parent):
    def __init__(self):
        super().__init__()
        self.bar = "bar"


c1 = Child1()
Child1.method(c1)  # pass Child1 instance to Child1 instance method

c2 = Child2()
Child1.method(c2)  # pass Child2 instance to Child1 instance method
and full output:
<class '__main__.Child1'> # gets a Child1 instance
foo # first call succeeds
<class '__main__.Child2'> # gets a Child2 instance (which doesn't have 'foo')
Traceback (most recent call last):
  File "C:/Python34/so.py", line 25, in <module>
    Child1.method(c2)
  File "C:/Python34/so.py", line 11, in method
    print(self.foo)
AttributeError: 'Child2' object has no attribute 'foo' # second call fails
However, as it is not clear what exactly the code is supposed to be doing, I can't suggest a fix. I don't know why you create but don't assign a ThndClass instance in MainWindow.initUI, for example.
Here is one possible fix; pass a Child1 instance to Child2.__init__, then use it either as an argument to Child2.method:
class Child2(Parent):
    def __init__(self, c1):  # provide Child1 instance as parameter
        super().__init__()
        self.bar = "bar"
        self.method(c1)  # pass instance to Child2.method

    def method(self, c1):
        c1.method()  # call Child1.method with c1 as self parameter
(Note that c1.method() is equivalent to Child1.method(c1).)
or make it an instance attribute:
class Child2(Parent):
    def __init__(self, c1):  # provide Child1 instance as parameter
        super().__init__()
        self.bar = "bar"
        self.c1 = c1  # make Child1 instance a Child2 instance attribute
        self.method()  # now no argument needed

    def method(self):
        self.c1.method()  # call Child1.method with c1 as self parameter
(Note that self.c1.method() is equivalent to Child1.method(self.c1).)
In use (either way):
>>> c1 = Child1()
>>> c2 = Child2(c1)
<class '__main__.Child1'> # Child1.method gets a Child1 instance
foo # and is called successfully
Thanks to your help, jonrsharpe, here's my working code :)
import sys
from PIL import Image
from PyQt4 import QtCore, QtGui


class MainWindow(QtGui.QWidget):
    def __init__(self):
        super(MainWindow, self).__init__()
        self.initUI()

    def initUI(self):
        self.TestVar = 0
        self.TheCount = 2
        if self.TheCount == 2:
            self.TestVar = 2
        Themain = self
        ThndClass(Themain)

    def Getit(self):
        print("called correctly")
        print(self.TestVar)
        return self.TestVar


def main():
    app = QtGui.QApplication([])
    mw = MainWindow()
    sys.exit(app.exec_())


class ThndClass(QtGui.QWidget):
    def __init__(self, Themain):
        super(ThndClass, self).__init__()
        self.Themain = Themain
        self.initUI2()

    def initUI2(self):
        print("Class Called")
        print(self.Themain.Getit())


if __name__ == '__main__':
    main()
All working well now :) Thank you very much!