Erlang - undefined function

I'm trying to run a very simple piece of Erlang code, and it's not working.
I've tried running some hello-world examples without a problem, but not my own code.
-module(server).

%% Exported Functions
-export([start/0, process_requests/1]).

%% API Functions
start() ->
    ServerPid = spawn(server, process_requests, [[]]),
    register(myserver, ServerPid).

process_requests(Clients) ->
    receive
        {client_join_req, Name, From} ->
            NewClients = [From | Clients],              %% TODO: COMPLETE
            broadcast(NewClients, {join, Name}),
            process_requests(NewClients);               %% TODO: COMPLETE
        {client_leave_req, Name, From} ->
            NewClients = lists:delete(From, Clients),   %% TODO: COMPLETE
            broadcast(Clients, {leave, Name}),          %% TODO: COMPLETE
            process_requests(NewClients);               %% TODO: COMPLETE
        {send, Name, Text} ->
            broadcast(Clients, {message, Name, Text}),  %% TODO: COMPLETE
            process_requests(Clients)
    end.

%% Local Functions
broadcast(PeerList, Message) ->
    Fun = fun(Peer) -> Peer ! Message end,
    lists:map(Fun, PeerList).
Compile result:
5> c(server).
{ok,server}
6> server:start().
** exception error: undefined function server:start/0

You compiled your code with c/1, but you forgot to load it into the VM with l/1. While the VM does load new modules automatically (modules not yet loaded into the VM), it doesn't reload them each time you compile a new beam.
If you do this a lot during development, you might want to look into tools like sync.
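In other words, after recompiling, explicitly reload the module before calling it. A sketch of the shell sequence (the prompt numbers and return values here are illustrative, not taken from the original session):
7> c(server).
{ok,server}
8> l(server).
{module,server}
9> server:start().
true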

Try checking with pwd(). whether you are in the directory that contains the server code you listed; this looks like a path issue. It can also happen that code:get_path() contains a directory where another server.beam is sitting that doesn't have a start function.
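To see which server.beam the VM would actually pick up, you can ask the code server directly. A sketch (the paths are placeholders):
1> pwd().
/path/where/you/compiled
ok
2> code:which(server).
"/some/other/dir/server.beam"  %% if this isn't the beam you just compiled, that explains the error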

nix-shell --command `stack build` leads to libpq-fe.h: No such file or directory

I am trying to compile my small project (a Yesod application with LambdaCms) on NixOS. However, after using cabal2nix (more precisely, cabal2nix project-karma.cabal --sha256=0 --shell > shell.nix), it seems I am still missing a dependency related to PostgreSQL.
My shell.nix file looks like this:
{ nixpkgs ? import <nixpkgs> {}, compiler ? "default" }:
let
  inherit (nixpkgs) pkgs;
  f = { mkDerivation, aeson, base, bytestring, classy-prelude
      , classy-prelude-conduit, classy-prelude-yesod, conduit, containers
      , data-default, directory, fast-logger, file-embed, filepath
      , hjsmin, hspec, http-conduit, lambdacms-core, monad-control
      , monad-logger, persistent, persistent-postgresql
      , persistent-template, random, resourcet, safe, shakespeare, stdenv
      , template-haskell, text, time, transformers, unordered-containers
      , uuid, vector, wai, wai-extra, wai-logger, warp, yaml, yesod
      , yesod-auth, yesod-core, yesod-form, yesod-static, yesod-test
      }:
      mkDerivation {
        pname = "karma";
        version = "0.0.0";
        sha256 = "0";
        isLibrary = true;
        isExecutable = true;
        libraryHaskellDepends = [
          aeson base bytestring classy-prelude classy-prelude-conduit
          classy-prelude-yesod conduit containers data-default directory
          fast-logger file-embed filepath hjsmin http-conduit lambdacms-core
          monad-control monad-logger persistent persistent-postgresql
          persistent-template random safe shakespeare template-haskell text
          time unordered-containers uuid vector wai wai-extra wai-logger warp
          yaml yesod yesod-auth yesod-core yesod-form yesod-static
          nixpkgs.zlib
          nixpkgs.postgresql
          nixpkgs.libpqxx
        ];
        libraryPkgconfigDepends = [ persistent-postgresql ];
        executableHaskellDepends = [ base ];
        testHaskellDepends = [
          base classy-prelude classy-prelude-yesod hspec monad-logger
          persistent persistent-postgresql resourcet shakespeare transformers
          yesod yesod-core yesod-test
        ];
        license = stdenv.lib.licenses.bsd3;
      };
  haskellPackages = if compiler == "default"
                      then pkgs.haskellPackages
                      else pkgs.haskell.packages.${compiler};
  drv = haskellPackages.callPackage f {};
in
  if pkgs.lib.inNixShell then drv.env else drv
The output is as follows:
markus#nixos ~/git/haskell/karma/karma (git)-[master] % nix-shell --command `stack build`
postgresql-libpq-0.9.1.1: configure
ReadArgs-1.2.2: download
postgresql-libpq-0.9.1.1: build
ReadArgs-1.2.2: configure
ReadArgs-1.2.2: build
ReadArgs-1.2.2: install
-- While building package postgresql-libpq-0.9.1.1 using:
/run/user/1000/stack31042/postgresql-libpq-0.9.1.1/.stack-work/dist/x86_64-linux/Cabal-1.22.4.0/setup/setup --builddir=.stack-work/dist/x86_64-linux/Cabal-1.22.4.0/ build --ghc-options " -ddump-hi -ddump-to-file"
Process exited with code: ExitFailure 1
Logs have been written to: /home/markus/git/haskell/karma/karma/.stack-work/logs/postgresql-libpq-0.9.1.1.log
[1 of 1] Compiling Main ( /run/user/1000/stack31042/postgresql-libpq-0.9.1.1/Setup.hs, /run/user/1000/stack31042/postgresql-libpq-0.9.1.1/.stack-work/dist/x86_64-linux/Cabal-1.22.4.0/setup/Main.o )
Linking /run/user/1000/stack31042/postgresql-libpq-0.9.1.1/.stack-work/dist/x86_64-linux/Cabal-1.22.4.0/setup/setup ...
Configuring postgresql-libpq-0.9.1.1...
Building postgresql-libpq-0.9.1.1...
Preprocessing library postgresql-libpq-0.9.1.1...
LibPQ.hsc:213:22: fatal error: libpq-fe.h: No such file or directory
compilation terminated.
compiling .stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/Database/PostgreSQL/LibPQ_hsc_make.c failed (exit code 1)
command was: /nix/store/9fbfiij3ajnd3fs1zyc2qy0ispbszrr7-gcc-wrapper-4.9.3/bin/gcc -c .stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/Database/PostgreSQL/LibPQ_hsc_make.c -o .stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/Database/PostgreSQL/LibPQ_hsc_make.o -fno-stack-protector -D__GLASGOW_HASKELL__=710 -Dlinux_BUILD_OS=1 -Dx86_64_BUILD_ARCH=1 -Dlinux_HOST_OS=1 -Dx86_64_HOST_ARCH=1 -I/run/current-system/sw/include -Icbits -I.stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/autogen -include .stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/autogen/cabal_macros.h -I/nix/store/xphvly2zcd6jsc2xklz1zmmz4y0dh3ny-ghc-7.10.2/lib/ghc-7.10.2/bytes_6elQVSg5cWdFrvRnfxTUrH/include -I/nix/store/xphvly2zcd6jsc2xklz1zmmz4y0dh3ny-ghc-7.10.2/lib/ghc-7.10.2/base_GDytRqRVSUX7zckgKqJjgw/include -I/nix/store/6ykqcjxr74l642kv9gf1ib8v9yjsgxr9-gmp-5.1.3/include -I/nix/store/xphvly2zcd6jsc2xklz1zmmz4y0dh3ny-ghc-7.10.2/lib/ghc-7.10.2/integ_2aU3IZNMF9a7mQ0OzsZ0dS/include -I/nix/store/xphvly2zcd6jsc2xklz1zmmz4y0dh3ny-ghc-7.10.2/lib/ghc-7.10.2/include -I/nix/store/xphvly2zcd6jsc2xklz1zmmz4y0dh3ny-ghc-7.10.2/lib/ghc-7.10.2/include/
I assume not much is missing, so a pointer would be nice.
What is also weird is that nix-shell works, but following it up with stack exec yesod devel tells me
Resolving dependencies...
Configuring karma-0.0.0...
cabal: At least the following dependencies are missing:
classy-prelude >=0.10.2,
classy-prelude-conduit >=0.10.2,
classy-prelude-yesod >=0.10.2,
hjsmin ==0.1.*,
http-conduit ==2.1.*,
lambdacms-core >=0.3.0.2 && <0.4,
monad-logger ==0.3.*,
persistent >=2.0 && <2.3,
persistent-postgresql >=2.1.1 && <2.3,
persistent-template >=2.0 && <2.3,
uuid >=1.3,
wai-extra ==3.0.*,
warp >=3.0 && <3.2,
yesod >=1.4.1 && <1.5,
yesod-auth >=1.4.0 && <1.5,
yesod-core >=1.4.6 && <1.5,
yesod-form >=1.4.0 && <1.5,
yesod-static >=1.4.0.3 && <1.6
When using mysql instead, I am getting
pcre-light-0.4.0.4: configure
mysql-0.1.1.8: configure
mysql-0.1.1.8: build
Progress: 2/59
-- While building package mysql-0.1.1.8 using:
/run/user/1000/stack12820/mysql-0.1.1.8/.stack-work/dist/x86_64-linux/Cabal-1.22.4.0/setup/setup --builddir=.stack-work/dist/x86_64-linux/Cabal-1.22.4.0/ build --ghc-options " -ddump-hi -ddump-to-file"
Process exited with code: ExitFailure 1
Logs have been written to: /home/markus/git/haskell/karma/karma/.stack-work/logs/mysql-0.1.1.8.log
[1 of 1] Compiling Main ( /run/user/1000/stack12820/mysql-0.1.1.8/Setup.lhs, /run/user/1000/stack12820/mysql-0.1.1.8/.stack-work/dist/x86_64-linux/Cabal-1.22.4.0/setup/Main.o )
Linking /run/user/1000/stack12820/mysql-0.1.1.8/.stack-work/dist/x86_64-linux/Cabal-1.22.4.0/setup/setup ...
Configuring mysql-0.1.1.8...
Building mysql-0.1.1.8...
Preprocessing library mysql-0.1.1.8...
In file included from C.hsc:68:0:
include/mysql_signals.h:9:19: fatal error: mysql.h: No such file or directory
#include "mysql.h"
^
compilation terminated.
compiling .stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/Database/MySQL/Base/C_hsc_make.c failed (exit code 1)
command was: /nix/store/9fbfiij3ajnd3fs1zyc2qy0ispbszrr7-gcc-wrapper-4.9.3/bin/gcc -c .stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/Database/MySQL/Base/C_hsc_make.c -o .stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/Database/MySQL/Base/C_hsc_make.o -fno-stack-protector -D__GLASGOW_HASKELL__=710 -Dlinux_BUILD_OS=1 -Dx86_64_BUILD_ARCH=1 -Dlinux_HOST_OS=1 -Dx86_64_HOST_ARCH=1 -I/nix/store/7ppa4k2drrvjk94rb60c1df9nvw0z696-mariadb-10.0.22-lib/include -I/nix/store/7ppa4k2drrvjk94rb60c1df9nvw0z696-mariadb-10.0.22-lib/include/.. -Iinclude -I.stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/autogen -include .stack-work/dist/x86_64-linux/Cabal-1.22.4.0/build/autogen/cabal_macros.h -I/nix/store/xphvly2zcd6jsc2xklz1zmmz4y0dh3ny-ghc-7.10.2/lib/ghc-7.10.2/bytes_6elQVSg5cWdFrvRnfxTUrH/include -I/nix/store/xphvly2zcd6jsc2xklz1zmmz4y0dh3ny-ghc-7.10.2/lib/ghc-7.10.2/base_GDytRqRVSUX7zckgKqJjgw/include -I/nix/store/6ykqcjxr74l642kv9gf1ib8v9yjsgxr9-gmp-5.1.3/include -I/nix/store/xphvly2zcd6jsc2xklz1zmmz4y0dh3ny-ghc-7.10.2/lib/ghc-7.10.2/integ_2aU3IZNMF9a7mQ0OzsZ0dS/include -I/nix/store/xphvly2zcd6jsc2xklz1zmmz4y0dh3ny-ghc-7.10.2/lib/ghc-7.10.2/include -I/nix/store/xphvly2zcd6jsc2xklz1zmmz4y0dh3ny-ghc-7.10.2/lib/ghc-7.10.2/include/
-- While building package pcre-light-0.4.0.4 using:
/home/markus/.stack/setup-exe-cache/setup-Simple-Cabal-1.22.4.0-x86_64-linux-ghc-7.10.2 --builddir=.stack-work/dist/x86_64-linux/Cabal-1.22.4.0/ configure --with-ghc=/run/current-system/sw/bin/ghc --user --package-db=clear --package-db=global --package-db=/home/markus/.stack/snapshots/x86_64-linux/nightly-2015-11-17/7.10.2/pkgdb/ --libdir=/home/markus/.stack/snapshots/x86_64-linux/nightly-2015-11-17/7.10.2/lib --bindir=/home/markus/.stack/snapshots/x86_64-linux/nightly-2015-11-17/7.10.2/bin --datadir=/home/markus/.stack/snapshots/x86_64-linux/nightly-2015-11-17/7.10.2/share --libexecdir=/home/markus/.stack/snapshots/x86_64-linux/nightly-2015-11-17/7.10.2/libexec --sysconfdir=/home/markus/.stack/snapshots/x86_64-linux/nightly-2015-11-17/7.10.2/etc --docdir=/home/markus/.stack/snapshots/x86_64-linux/nightly-2015-11-17/7.10.2/doc/pcre-light-0.4.0.4 --htmldir=/home/markus/.stack/snapshots/x86_64-linux/nightly-2015-11-17/7.10.2/doc/pcre-light-0.4.0.4 --haddockdir=/home/markus/.stack/snapshots/x86_64-linux/nightly-2015-11-17/7.10.2/doc/pcre-light-0.4.0.4 --dependency=base=base-4.8.1.0-4f7206fd964c629946bb89db72c80011 --dependency=bytestring=bytestring-0.10.6.0-18c05887c1aaac7adb3350f6a4c6c8ed
Process exited with code: ExitFailure 1
Logs have been written to: /home/markus/git/haskell/karma/karma/.stack-work/logs/pcre-light-0.4.0.4.log
Configuring pcre-light-0.4.0.4...
setup-Simple-Cabal-1.22.4.0-x86_64-linux-ghc-7.10.2: The program 'pkg-config'
version >=0.9.0 is required but it could not be found.
After adding pkgconfig to my global configuration, the build gets a little further, so it seems that shell.nix is being ignored to some extent.
(Sources for what I tried so far:
https://groups.google.com/forum/#!topic/haskell-stack/_ZBh01VP_fo)
Update: It seems like I overlooked this section of the manual
http://nixos.org/nixpkgs/manual/#using-stack-together-with-nix
However, the first idea that came to mind
(stack --extra-lib-dirs=/nix/store/c6qy7n5wdwl164lnzha7vpc3av9yhnga-postgresql-libpq-0.9.1.1/lib build)
did not work yet; most likely I need to use
--extra-include-dirs or try one of the variations. It seems weird that stack is still trying to build postgresql-libpq in the very same version, though.
Update2: Currently trying out "stack --extra-lib-dirs=/nix/store/1xf77x47d0m23nbda0azvkvj8w8y77c7-postgresql-9.4.5/lib --extra-include-dirs=/nix/store/1xf77x47d0m23nbda0azvkvj8w8y77c7-postgresql-9.4.5/include build", which looks promising. It does not look like the Nix way, but still.
Update3: Still getting
<command line>: can't load .so/.DLL for: /home/markus/.stack/snapshots/x86_64-linux/nightly-2015-11-17/7.10.2/lib/x86_64-linux-ghc-7.10.2/postgresql-libpq-0.9.1.1-ABGs5p1J8FbEwi6uvHaiV6/libHSpostgresql-libpq-0.9.1.1-ABGs5p1J8FbEwi6uvHaiV6-ghc7.10.2.so
(libpq.so.5: cannot open shared object file: No such file or directory) stack build 186.99s user 2.93s system 109% cpu 2:52.76 total
which is strange since libpq.so.5 is contained in /nix/store/1xf77x47d0m23nbda0azvkvj8w8y77c7-postgresql-9.4.5/lib.
An additional
$LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/nix/store/1xf77x47d0m23nbda0azvkvj8w8y77c7-postgresql-9.4.5/lib
does not help either.
Update4:
By the way, yesod devel does the same as stack exec yesod devel. My libraries are downloaded to /nix/store but they are not recognized.
Maybe I need to make "build-nix" work, and yesod devel simply does not work here?
Just for completeness, here is stack.yaml
resolver: nightly-2015-11-17
# run stack setup otherwise!!

# Local packages, usually specified by relative directory name
packages:
  - '.'

# Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3)
extra-deps: [lambdacms-core-0.3.0.2, friendly-time-0.4, lists-0.4.2, list-extras-0.4.1.4]

# Override default flag values for local packages and extra-deps
flags:
  karma:
    library-only: false
    dev: false

# Extra package databases containing global packages
extra-package-dbs: []
Next weekend, I will check out
https://pr06lefs.wordpress.com/2014/09/27/compiling-a-yesod-project-on-nixos/
and other search results.
Funny, because I've just had a similar problem myself - solved it by adding these two lines to stack.yaml:
extra-include-dirs: [/nix/store/jrdvjvf0w9nclw7b4k0pdfkljw78ijgk-postgresql-9.4.5/include/]
extra-lib-dirs: [/nix/store/jrdvjvf0w9nclw7b4k0pdfkljw78ijgk-postgresql-9.4.5/lib/]
You may want to check first which PostgreSQL path from /nix/store you should use for include/ and lib/:
nix-build --no-out-link "<nixpkgs>" -A postgresql
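For example (a sketch; the hash and version on your machine will differ), the command simply prints the store path that goes into the two stack.yaml entries above:
$ nix-build --no-out-link "<nixpkgs>" -A postgresql
/nix/store/<hash>-postgresql-9.4.5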
And BTW, why do you use nix-shell if you are going to use stack and you have project-karma.cabal available? Have you considered migrating your project with stack init?
Looks like stack is trying to build haskellPackages.postgresql-libpq outside of the nix framework.
You probably don't want that to happen. Maybe try to add postgresql-libpq to libraryHaskellDepends?
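If you want to try that, the change would be confined to the f function in the shell.nix from the question, roughly like this (a sketch, not a tested fix; the dependency list is abbreviated with a comment):
f = { mkDerivation, base, postgresql-libpq, stdenv, ... }:  # `...` stands in for the other arguments
    mkDerivation {
      pname = "karma";
      version = "0.0.0";
      isLibrary = true;
      isExecutable = true;
      libraryHaskellDepends = [
        base
        postgresql-libpq  # added so that Nix, not stack, provides the libpq binding
        # ...plus the rest of the dependencies already listed in the question...
      ];
      license = stdenv.lib.licenses.bsd3;
    };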

Error: bad argument #1 to 'insert' (table expected, got nil)

I am trying to connect to a MySQL server using LuaSQL via mysql-proxy. I am trying to execute a simple program (db.lua):
require("luasql.mysql")
local _sqlEnv = assert(luasql.mysql())
local _con = nil
function read_auth(auth)
local host, port = string.match(proxy.backends[1].address, "(.*):(.*)")
_con = assert(_sqlEnv:connect( "db_name", "username", "password", "hostname", "3306"))
end
function disconnect_client()
assert(_con:close())
end
function read_query(packet)
local cur = con:execute("select * from t1")
myTable = {}
row = cur:fetch(myTable, "a")
print(myTable.id,myTable.user)
end
This code runs fine when I execute it without mysql-proxy. When I connect through mysql-proxy, the error log displays these errors:
mysql.lua:8: bad argument #1 to 'insert' (table expected, got nil)
db.lua:1: loop or previous error loading module 'luasql.mysql'
mysql.lua is a default file of LuaSql:
---------------------------------------------------------------------
-- MySQL specific tests and configurations.
-- $Id: mysql.lua,v 1.4 2006/01/25 20:28:30 tomas Exp $
---------------------------------------------------------------------
QUERYING_STRING_TYPE_NAME = "binary(65535)"
table.insert (CUR_METHODS, "numrows")
table.insert (EXTENSIONS, numrows)
---------------------------------------------------------------------
-- Build SQL command to create the test table.
---------------------------------------------------------------------
local _define_table = define_table
function define_table (n)
    return _define_table(n) .. " TYPE = InnoDB;"
end

---------------------------------------------------------------------
-- MySQL versions 4.0.x do not implement rollback.
---------------------------------------------------------------------
local _rollback = rollback
function rollback ()
    if luasql._MYSQLVERSION and string.sub(luasql._MYSQLVERSION, 1, 3) == "4.0" then
        io.write("skipping rollback test (mysql version 4.0.x)")
        return
    else
        _rollback ()
    end
end
As stated in my previous comment, the error indicates that table.insert(CUR_METHODS, ...) is getting nil as its first argument. Since the first argument is CUR_METHODS, it means that CUR_METHODS has not been defined yet. Since this happens near the top of the luasql.mysql module, my guess is that the luasql initialization was incomplete, maybe because the MySQL DLL was not found. It may be that LUA_CPATH does not find the MySQL DLL for luasql, but I'm surprised you wouldn't get a package error, so something odd is going on. You'll have to dig into the luasql module and C file to figure out why it is not being created.
Update: alternatively, update your post to show the output of print("LUA path:", package.path) and print("LUA cpath:", package.cpath) from your mysql-proxy script, and also show the path of the folder where luasql is installed and the contents of that folder.
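For instance, a minimal diagnostic you could drop at the top of db.lua (no luasql calls involved, so it should run even if the module fails to load):
print("LUA path: ", package.path)
print("LUA cpath:", package.cpath)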

How to setup username and password with Slick's source code generator?

Following the directions on this page: http://slick.typesafe.com/doc/2.0.0/code-generation.html
we see that something like the following segment of code is required to generate models for MySQL tables:
val url = "jdbc:mysql://127.0.0.1/SOME_DB_SCHEMA?characterEncoding=UTF-8&useUnicode=true"
val slickDriver = "scala.slick.driver.MySQLDriver"
val jdbcDriver = "com.mysql.jdbc.Driver"
val outputFolder = "/some/path"
val pkg = "com.pligor.server"
scala.slick.model.codegen.SourceCodeGenerator.main(
  Array(slickDriver, jdbcDriver, url, outputFolder, pkg)
)
These parameters are enough for an H2 database, as in the example in the link.
How do I include a username and password for the MySQL database?
Based on several links found on the internet, and also on cvogt's answer, this is the minimum that you need to do.
Note that this is a general solution for sbt. If you are dealing with the Play framework, you might find it easier to perform this task with the relevant plugin.
First of all, you need a new sbt project because of all the library dependencies that need to be referenced in order for the Slick source code generator to run.
Create the new sbt project using this tutorial: http://scalatutorials.com/beginner/2013/07/18/getting-started-with-sbt/
Preferably use the method "Setup using giter8".
If you happen to work with IntelliJ, you need to create the file project/plugins.sbt and insert this line into it: addSbtPlugin("com.hanhuy.sbt" % "sbt-idea" % "1.6.0").
Then execute gen-idea in sbt to generate an IntelliJ project.
With giter8 you get an auto-generated file ProjectNameBuild.scala inside the project folder. Open it and include at least these library dependencies:
libraryDependencies ++= List(
  "mysql" % "mysql-connector-java" % "5.1.27",
  "com.typesafe.slick" %% "slick" % "2.0.0",
  "org.slf4j" % "slf4j-nop" % "1.6.4",
  "org.scala-lang" % "scala-reflect" % scala_version
)
where scala_version is the variable private val scala_version = "2.10.3".
Now create the custom source code generator, which looks like this:
import scala.slick.model.codegen.SourceCodeGenerator

object CustomSourceCodeGenerator {

  import scala.slick.driver.JdbcProfile
  import scala.reflect.runtime.currentMirror

  def execute(url: String,
              jdbcDriver: String,
              user: String,
              password: String,
              slickDriver: String,
              outputFolder: String,
              pkg: String) = {

    val driver: JdbcProfile = currentMirror.reflectModule(
      currentMirror.staticModule(slickDriver)
    ).instance.asInstanceOf[JdbcProfile]

    driver.simple.Database.forURL(
      url,
      driver = jdbcDriver,
      user = user,
      password = password
    ).withSession {
      implicit session =>
        new SourceCodeGenerator(driver.createModel).writeToFile(slickDriver, outputFolder, pkg)
    }
  }
}
Finally, you need to call this execute method inside the main project object. Find the file ProjectName.scala that was auto-generated by giter8.
Inside it you will find a println call, since this is merely a "hello world" application. Above the println call, add something like this:
CustomSourceCodeGenerator.execute(
  url = "jdbc:mysql://127.0.0.1/SOME_DB_SCHEMA?characterEncoding=UTF-8&useUnicode=true",
  slickDriver = "scala.slick.driver.MySQLDriver",
  jdbcDriver = "com.mysql.jdbc.Driver",
  outputFolder = "/some/path",
  pkg = "com.pligor.server",
  user = "root",
  password = "xxxxxyourpasswordxxxxx"
)
This way, every time you execute sbt run, you automatically generate the Table classes required by Slick.
Note that at least for 2.0.1 this is fixed. Just add the username and password to the end of the Array as Strings.
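In terms of the snippet from the question, the call then becomes roughly this (a sketch assuming Slick 2.0.1, with user and password holding your MySQL credentials):
scala.slick.model.codegen.SourceCodeGenerator.main(
  Array(slickDriver, jdbcDriver, url, outputFolder, pkg, user, password)
)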
This has been asked and answered here: https://groups.google.com/forum/#!msg/scalaquery/UcS4_wyrJq0/obLHheIWIXEJ . Currently you need to customize the code generator. A PR for 2.0.1 is in the queue.
My solution is nearly the same as George's answer, but I'll add mine anyway. This is the entire file I use to generate code for my mysql database in an SBT project.
SlickAutoGen.scala
package mypackage

import scala.slick.model.codegen.SourceCodeGenerator

object CodeGen {
  def main(args: Array[String]) {
    SourceCodeGenerator.main(
      Array(
        "scala.slick.driver.MySQLDriver",
        "com.mysql.jdbc.Driver",
        "jdbc:mysql://localhost:3306/mydb",
        "src/main/scala/",
        "mypackage",
        "root",
        "" // I don't use a password on localhost
      )
    )
  }
}
build.sbt
// build.sbt --- Scala build tool settings

libraryDependencies ++= List(
  "com.typesafe.slick" %% "slick" % "2.0.1",
  "mysql" % "mysql-connector-java" % "5.1.24",
  ...
)
To use this, just modify the settings, save it in the project root directory, and run it as follows:
$ sbt
> runMain mypackage.CodeGen

How do I get an unhandled exception to be reported in SML/NJ?

I have the following SML program in a file named testexc.sml:
structure TestExc : sig
    val main : (string * string list -> int)
end =
struct
    exception OhNoes;

    fun main(prog_name, args) = (
        raise OhNoes
    )
end
I build it with smlnj-110.74 like this:
ml-build sources.cm TestExc.main testimg
Where sources.cm contains:
Group is
csx.sml
I invoke the program like so (on Mac OS 10.8):
sml @SMLload testimg.x86-darwin
I expect to see something when I invoke the program, but the only thing I get is a return code of 1:
$ sml @SMLload testimg.x86-darwin
$ echo $?
1
What gives? Why would SML fail silently on this unhandled exception? Is this behavior normal? Is there some generic handler I can put on main that will print the error that occurred? I realize I can match exception OhNoes, but what about larger programs with exceptions I might not know about?
The answer is to handle the exception, call it e, and print the data using a couple of functions available in the system:
$ sml
Standard ML of New Jersey v110.74 [built: Tue Jan 31 16:23:10 2012]
- exnName;
val it = fn : exn -> string
- exnMessage;
val it = fn : exn -> string
-
Now we have our modified program, where the generic handler is tacked onto main():
structure TestExc : sig
    val main : (string * string list -> int)
end =
struct
    exception OhNoes;
    open List;

    fun exnToString(e) =
        List.foldr (op ^) "" ["[",
                              exnName e,
                              " ",
                              exnMessage e,
                              "]"]

    fun main(prog_name, args) = (
        raise OhNoes
    )
    handle e => (
        print("Grasshopper disassemble: " ^ exnToString(e));
        42)
end
I used lists for generating the message, so to make this program build, you'll need a reference to the basis library in sources.cm:
Group is
$/basis.cm
sources.cm
And here's what it looks like when we run it:
$ sml @SMLload testimg.x86-darwin
Grasshopper disassemble: [OhNoes OhNoes (more info unavailable: ExnInfoHook not initialized)]
$ echo $?
42
I don't know what ExnInfoHook is, but I see OhNoes, at least. It's too bad the SML compiler didn't add a basic handler for us, so as to print something when there was an unhandled exception in the compiled program. I suspect ml-build would be responsible for that task.

How do I get yason:encode-alist to return the encoded string instead of sending it to a stream?

I'm trying to encode a JSON string from an alist using YASON. The problem is, the return value I'm getting is the original alist I fed it. It's printing the JSON string, and according to the documentation, it goes to *STANDARD-OUTPUT*.
Simple example session:
(ql:quickload :yason)
To load "yason":
Load 1 ASDF system:
yason
; Loading "yason"
(:YASON)
* (defparameter starving-json-eater (yason:encode-alist '(("foo" . "bar") ("baz" . "qux"))))
{"foo":"bar","baz":"qux"}
STARVING-JSON-EATER
* starving-json-eater
(("foo" . "bar") ("baz" . "qux"))
I've tried passing 'starving-json-eater into the stream parameter, but I get an error:
* (setf starving-json-eater (yason:encode-alist '(("foo" . "bar") ("baz" . "qux")) 'starving-json-eater))
debugger invoked on a SIMPLE-ERROR in thread
#<THREAD "main thread" RUNNING {1001E06783}>:
There is no applicable method for the generic function
#<STANDARD-GENERIC-FUNCTION SB-GRAY:STREAM-WRITE-CHAR (1)>
when called with arguments
(STARVING-JSON-EATER #\{).
Type HELP for debugger help, or (SB-EXT:EXIT) to exit from SBCL.
restarts (invokable by number or by possibly-abbreviated name):
0: [RETRY] Retry calling the generic function.
1: [ABORT] Exit debugger, returning to top level.
((:METHOD NO-APPLICABLE-METHOD (T)) #<STANDARD-GENERIC-FUNCTION SB-GRAY:STREAM-WRITE-CHAR (1)> STARVING-JSON-EATER #\{) [fast-method]
How can I get {"foo":"bar","baz":"qux"} into starving-json-eater?
You can use WITH-OUTPUT-TO-STRING to temporarily bind a variable to an open stream that writes into a string. You may even bind the special variable *standard-output*, so that you only change the dynamic context of your code without explicitly providing a different stream argument (much like redirecting streams for a process).
(with-output-to-string (*standard-output*)
  (yason:encode-alist '(("a" . "b"))))
Note that binding *standard-output* means that anything that writes to *standard-output* will end up being written in the string during the extent of with-output-to-string. In the above case, the scope is sufficiently limited to avoid unexpectedly capturing output from nested code. You could also use a lexical variable to control precisely who gets to write to the string:
(with-output-to-string (json)
  (yason:encode-alist '(("a" . "b")) json))
The trick is to create a throwaway string output stream to catch the output, and then grab the string from it later:
* (ql:quickload :yason)
To load "yason":
Load 1 ASDF system:
yason
; Loading "yason"
(:YASON)
* (defparameter sated-json-eater (make-string-output-stream))
SATED-JSON-EATER
* (yason:encode-alist '(("foo" . "bar") ("baz" . "qux")) sated-json-eater)
(("foo" . "bar") ("baz" . "qux"))
* (defparameter json-string (get-output-stream-string sated-json-eater))
JSON-STRING
* json-string
"{\"foo\":\"bar\",\"baz\":\"qux\"}"
This can be hidden away in a function:
(defun json-string-encode-alist (alist-to-encode)
  (let ((stream (make-string-output-stream)))
    (yason:encode-alist alist-to-encode stream)
    (get-output-stream-string stream)))
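Used at the REPL, it returns the encoded string directly (same alist as above):
* (json-string-encode-alist '(("foo" . "bar") ("baz" . "qux")))
"{\"foo\":\"bar\",\"baz\":\"qux\"}"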