Lua



_G is a special table of all globals.
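For example (x and y are throwaway names):
x = 42        -- no 'local', so x is a global
print(_G.x)   -- 42: globals are just entries in _G
_G.y = 'hi'   -- writing to _G also creates a global
print(y)      -- hi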

local t = 3  -- local to its block; without 'local', assignment creates a global
1-based indexing.
`foo:bar()` is the same as `foo.bar(foo)`
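For example (t and greet are illustrative names):
t = {name = 'mort'}
function t.greet(self) print('hi ' .. self.name) end
t:greet()   -- same as t.greet(t); prints "hi mort"
-- 'function t:greet() ... end' would define it with an implicit self parameter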
Setting a key's value to nil removes it from the table.

-- comment
--[[
multi line comment
--]] 
Arithmetic: + - * / % ^    math library: math.abs, math.ceil, ...
Strings: single quotes ' ', double quotes " ", or double brackets for multi-line:
[[
multi line string
]]
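A few common string operations (s is a throwaway name):
s = 'multi' .. "line"   -- .. concatenates
print(#s)               -- 9, the length operator
print(string.format('%s has %d chars', s, #s))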

> who()  -- list the variables we have defined (a Torch REPL helper)
> while n < 50 do n = n + 1 end 
> io.write(4 .. ' ')  -- note the space before ..: '4..' would parse as a malformed number
> for i, v in ipairs({'a', 'b', 'c', 'd'}) do print(i, v) end
> for k, v in pairs({a=3, b=5, c=8, d=2}) do print(k, v) end
-- tables: list/array, dictionary
> t1 = {'a', 2, true, "another string"}    -- array
> t1[3]  -- true
> t2 = {abc=3, def=4, [30]='asd'}   -- dictionary
> t2['abc']  -- same as t2.abc, but for numeric keys use the [ ] notation
> table.insert(t1, 8)  -- same as t1[#t1 + 1] = 8: appends the new item at index #t1 + 1
> t2.mynewitem = 9

-- functions and conditional statements
function f(x)
  if x then  -- x is neither nil nor false
    print('true!', x)
  else
    print('false!', x)
  end
end

f(nil)  -- prints 'false!'
f(0)    -- prints 'true!': anything except nil and false is truthy (even 0 and '')
function g(x)
  x = x or 'undefined'  -- idiomatic default value
  print(x)
end

g(); g(nil); g(false)  -- all three print 'undefined'

function adder(x)
  return function(y)
    return x + y
  end
end

k = adder(5)
k(3) -- 8
th> function addN(n)
  local nlocal = n  -- captured as an upvalue; it persists across calls to the returned closure
  local flocal = function(x) nlocal = nlocal - 1; return x + nlocal end
  return flocal
end
th> a = addN(20)
th> a(10)
  29
th> a(10)
  28
th> a(10)
  27


Modules

$ vim mod.lua
local M = {}

local function sayMyName()  -- local: private to the module
    print('mort')
end

function M.sayHello()
    print('Hello')
    sayMyName()
end

return M  -- the module returns the table M

$ th
> m = require('mod')
> m.sayHello()
> -- require caches modules; after modifying and resaving the file, reload it like this:
> m = dofile('mod.lua')    -- like require, but runs the file on every call (no caching)
> -- loadfile compiles the file without running it; call the result to run it
> f = loadfile('mod.lua')
> m = f()
> -- execute lua strings as code
> g = loadstring 'print(123)' 
> g()
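Since loadstring only compiles the chunk, the returned function can also produce values:
> f = loadstring('return 2 + 3')
> f()  -- 5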
I/O

Write
file = io.open("/path/to/file", "w")
file:write('\n')
file:close()

Read
for line in io.lines("my.txt") do print(line) end
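To read a whole file at once instead (a minimal sketch, reusing "my.txt"):
f = io.open("my.txt", "r")
contents = f:read('*a')   -- '*a' reads the entire file as one string
f:close()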
Snippets


function getKeysArray(dict)
  local keyset = {}
  local n = 0
  for k, v in pairs(dict) do
    n = n + 1
    keyset[n] = k
  end
  return keyset
end

table.sort(keyset)  -- pairs() returns keys in no particular order, so sort afterwards if needed
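Usage sketch (the table literal is made up for illustration):
keyset = getKeysArray({pear = 1, apple = 2, mango = 3})
table.sort(keyset)
for _, k in ipairs(keyset) do print(k) end   -- apple, mango, pear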

-----------
Sort a dictionary by values
a = torch.load('a_dictionary.th')
b = {}
for k, v in pairs(a) do
  table.insert(b, {k, v})  -- copy into an indexed array: an associative table cannot be sorted directly
end
table.sort(b, function(x, y) return x[2] > y[2] end)  -- descending by value
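A self-contained version with a plain Lua table (no torch file needed; names are illustrative):
a = {x = 3, y = 9, z = 5}
b = {}
for k, v in pairs(a) do table.insert(b, {k, v}) end
table.sort(b, function(p, q) return p[2] > q[2] end)
for _, pair in ipairs(b) do print(pair[1], pair[2]) end   -- y 9, z 5, x 3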

Other Examples

  


Dictionary
t = {key1 = 'value1', key2 = false} 
print(t.key1) -- Prints 'value1'. 
t.newKey = {} -- Adds a new key/value pair. 
t.key2 = nil -- Removes key2 from the table.
-- Any non-nil value can be used as a key, but object keys (tables, functions) match by reference, not by contents
u = {['@!#'] = 'qbert', [{}] = 1729, [6.28] = 'tau'}
print(u[6.28]) -- prints "tau": number keys match by value
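To see the reference point in action (key is a throwaway name):
key = {}
u2 = {[key] = 1729}
print(u2[key])   -- 1729: the exact same table object
print(u2[{}])    -- nil: a fresh empty table is a different reference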




Lists
x, y, z = 1, 2, 3, 4  -- Unmatched receivers are nil, unmatched senders are discarded.
function bar(a, b, c)
  print(a, b, c)
  return 4, 8, 15, 16, 23, 42
end

x, y = bar('zaphod')  --> prints "zaphod  nil nil"
-- Now x = 4, y = 8, values 15..42 are discarded.










Torch

> torch.type('we') -- string, number, table
> t = torch.Tensor(2,3,4)   -- uninitialized 2x3x4 tensor
> #t
 2
 3
 4
[torch.LongStorage of size 3]
y = torch.Tensor(2,2)
y[{1,1}] -- element at row 1, column 1
> i = 0
> t:apply(function() i = i + 1; return i end)  -- applies the function element by element, filling t with 1 to 24
> torch.type(t)   -- torch.DoubleTensor. Other types: ByteTensor, CharTensor, ShortTensor, IntTensor, LongTensor, FloatTensor, DoubleTensor
> torch.setdefaulttensortype('torch.FloatTensor')   -- float is faster and deep learning doesn't need double precision
-- A tensor is simply a view on a specific underlying storage
> r = torch.Tensor(t):resize(3,8) -- type error: the default type is now Float but t is Double
> r = torch.DoubleTensor(t):resize(3,8) -- O.K.
-- assignment in tensors is just copying references not a deep copy
> u = t:clone()  -- deep clone
> u:random() -- fill random
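A quick check of reference vs clone (t is the 2x3x4 tensor from above):
> t2 = t              -- t2 is the same tensor object; nothing is copied
> t2[1][1][1] = -99   -- t[1][1][1] changes too
> u[1][1][1]          -- unaffected: clone() copied the storage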
> v =  torch.Tensor({1,2,3,4})  -- create a tensor from lua array
> #v
 4
[torch.LongStorage of size 1]
> v:size(1)   -- get the size of dimension 1.
4
> w = torch.ones(4)
> v * w  -- dot product: 10
> t:dim()  -- number of dimensions in t = 3
x = torch.Tensor({2,4,6,8,3,6,4,5,1})   -- indexing and selection ranges (slicing)
x[ {{3,4}} ]  -- 6,8
> x[{{3,-1}}] -- 6 8 3 6 4 5 1
> v = torch.range(1,4)  -- create tensor {1,2,3,4}
> v:pow(2) -- square, in place
> w = torch.pow(v,2) 
> m = torch.Tensor({{9,6,3,4},{7,2,8,1}})  -- 2d tensor
> m:dim()  -- 2
> m:size()
> m:size(1)
> m:size(2)
> #m
> m[2][3] --8
> m[{2,3}] -- 8 , same as above
> m[{{},{1}}]  -- extract the first column: {} means all rows, {1} the first column  -- 2x1 matrix
> m[{{},1}]    -- extract the same column as a vector  -- size-2 vector
> m[{{2},{}}]  -- 2nd row , all columns
> m[{2,{}}]  -- 2nd row , all columns as vector

> torch.range(3,-4.2, -1.9) -- from 3 to -4.2 step size -1.9
> torch.linspace(3,8,50) -- 50 items between range 3 to 8
> torch.logspace(3,8,50)  -- like linspace, but logarithmically spaced
Other constructors: ones, eye, zeros, randn (normal), rand (uniform)

> dM = m:double() -- cast to double
> m * torch.rand(4,6)
> m:cmul(torch.rand(2,4)) -- element wise (number of elements must match, but sizes do not matter.)
> m:t() -- transpose

a = torch.Tensor({{1,2,3,4}})
b = torch.Tensor({{5,6,7,8}})
> torch.cat(a,b,1) -- concat along the first dimension
> torch.cat(a,b,2) -- concat along the second dimension

> m * 2  -- new storage; to reuse the same storage use m:mul(2)

> torch.pow(m,2)  -- new storage
> m:pow(2)  -- same storage

reshape, view, resize
> a = torch.range(1,12)
> b = a:reshape(3,4)   -- 3 rows, 4 columns. new matrix
> c = a:view(4,3)  -- same storage seen with new dimensions, so a change through either c or a affects both
> a:resize(1,12)  -- in place resize
> a[{1,{1,6}}]:mul(2) -- multiply row 1, columns 1 through 6 by 2 in place; a is modified
> c[1][1] = -90   -- equivalent item in a (a[1][1]) will also change (they are one matrix storage).
> b:random(12) -- random integers from 1 to 12; a and c don't change (reshape copied the storage)
a and c are two different views of the same storage.




Lua and images
$ luarocks list env
$ qlua -lenv   # start Qt-enabled Lua and load the env library, which gives the REPL display capabilities
t7> ai = image.load('ai.jpg')
t7> #ai
t7> = torch.type(ai)
t7> os.execute('system_profiler SPDisplaysDataType | grep resolution') -- resolution 1000 * 900
t7> ai_scale = image.scale(ai, 1000/2, 900/2)
t7> = ai_scale
t7> image.display(ai_scale)
t7> ai_green = ai_scale:clone()
t7> ai_green[1]:fill(0) --red
t7> ai_green[3]:fill(0) --blue, only left with green
t7> image.display({image={ai_scale, ai_green}, legend='view images here', zoom=0.5})
t7> ai_red = torch.zeros(#ai_scale)
t7> ai_red[1] = ai_scale[1]
t7> ai_blue = torch.zeros(#ai_scale)
t7> ai_blue[3] = ai_scale[3]
t7> image.display({image={ai_scale, ai_green, ai_red, ai_blue}, legend='view images here', zoom=0.5, nrow=2})
t7> output_image = image.toDisplayTensor({image={ai_scale, ai_green, ai_red, ai_blue}, legend='view images here', zoom=0.5, nrow=2}) -- save generated image as image.
t7> image.savePNG('generated_image.png', output_image)
t7> crop_image = image.crop(output_image, 0, 0, 1000/2, 900/2)
t7> image.display(image.drawRect(image.lena(), 200,200, 400,400, {lineWidth=5, color={0,255,0},}))   -- lena with a bounding rectangle on her face.



The bias input is fixed at 1, so the weighted sum always includes a + theta0 * 1 term.

The other inputs are values between 0 and 1, one x per class, indicating how you performed in that class. The output is 0 if the weighted sum is less than zero and 1 if it is greater: a step function, which is approximated by the smooth sigmoid function.
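A minimal Lua sketch of that unit (the weights and inputs are made-up numbers):
function sigmoid(z) return 1 / (1 + math.exp(-z)) end

theta0 = 0.1           -- bias weight; its input is the constant 1
theta  = {0.5, -0.3}   -- one weight per input
x      = {0.9, 0.4}    -- inputs between 0 and 1

z = theta0 * 1
for i = 1, #x do z = z + theta[i] * x[i] end

step = (z >= 0) and 1 or 0   -- hard threshold: 0 below zero, 1 otherwise
print(step, sigmoid(z))      -- step output (1) and its smooth approximation (~0.61)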












