#--
# DO NOT MODIFY!!!!
# This file is automatically generated by rex 1.0.5
# from lexical definition file "t64.rex".
#++

require 'racc/parser'

class T64 < Racc::Parser
  require 'strscan'

  class ScanError < StandardError ; end

  attr_reader   :lineno
  attr_reader   :filename
  attr_accessor :state

  def scan_setup(str)
    @ss = StringScanner.new(str)
    @lineno = 1
    @state  = nil
  end

  def action
    yield
  end

  def scan_str(str)
    scan_setup(str)
    do_parse
  end
  alias :scan :scan_str

  def load_file( filename )
    @filename = filename
    open(filename, "r") do |f|
      scan_setup(f.read)
    end
  end

  def scan_file( filename )
    load_file(filename)
    do_parse
  end


  def next_token
    return if @ss.eos?

    # skips empty actions
    until token = _next_token or @ss.eos?; end
    token
  end

  def _next_token
    text = @ss.peek(1)
    @lineno += 1 if text == "\n"
    token = case @state
    when nil
      case
      when (text = @ss.scan(/reta/))
         action { [:REG,15] }

      when (text = @ss.scan(/r\d+/))
         action { [:REG,text.to_i] }

      when (text = @ss.scan(/0x[0-9a-f]+/))
         action { [:NUM, make_bytes(text.to_i(16))] }

      when (text = @ss.scan(/\d+/))
         action { [:NUM, make_bytes(text.to_i)] }

      when (text = @ss.scan(/\n/))
         action { [:NEWLINE,"\n"] }

      when (text = @ss.scan(/[ ]+/))
         action { }

      when (text = @ss.scan(/\(/))
         action { [:LPAREN,"("] }

      when (text = @ss.scan(/\)/))
         action { [:RPAREN,")"] }

      when (text = @ss.scan(/,/))
         action { [:COMMA,","] }

      when (text = @ss.scan(/\#.+/))
         action { }

      when (text = @ss.scan(/:/))
         action { [:COLON,text] }

      when (text = @ss.scan(/LDB/))
         action { [:LDB,text] }

      when (text = @ss.scan(/LDW/))
         action { [:LDW,text] }

      when (text = @ss.scan(/LDDW/))
         action { [:LDDW,text] }

      when (text = @ss.scan(/LD/))
         action { [:LD,text] }

      when (text = @ss.scan(/STB/))
         action { [:STB,text] }

      when (text = @ss.scan(/STW/))
         action { [:STW,text] }

      when (text = @ss.scan(/STDW/))
         action { [:STDW,text] }

      when (text = @ss.scan(/ST/))
         action { [:ST,text] }

      when (text = @ss.scan(/ARIB/))
         action { [:ARIB,text] }

      when (text = @ss.scan(/ARIW/))
         action { [:ARIW,text] }

      when (text = @ss.scan(/ARIDW/))
         action { [:ARIDW,text] }

      when (text = @ss.scan(/ARI/))
         action { [:ARI,text] }

      when (text = @ss.scan(/ARB/))
         action { [:ARB,text] }

      when (text = @ss.scan(/ARW/))
         action { [:ARW,text] }

      when (text = @ss.scan(/ARDW/))
         action { [:ARDW,text] }

      when (text = @ss.scan(/AR/))
         action { [:AR,text] }

      when (text = @ss.scan(/JMP/))
         action { [:JMP,text] }

      when (text = @ss.scan(/JC/))
         action { [:JC,text] }

      when (text = @ss.scan(/JNC/))
         action { [:JNC,text] }

      when (text = @ss.scan(/JZ/))
         action { [:JZ,text] }

      when (text = @ss.scan(/JNZ/))
         action { [:JNZ,text] }

      when (text = @ss.scan(/JST/))
         action { [:JST,text] }

      when (text = @ss.scan(/db/))
         action { [:DB,text] }

      when (text = @ss.scan(/dw/))
         action { [:DW,text] }

      when (text = @ss.scan(/ddw/))
         action { [:DDW,text] }

      when (text = @ss.scan(/dqw/))
         action { [:DQW,text] }

      when (text = @ss.scan(/org/))
         action { [:ORG,text] }

      when (text = @ss.scan(/NOT/))
         action { [:AROP,text] }

      when (text = @ss.scan(/AND/))
         action { [:AROP,text] }

      when (text = @ss.scan(/OR/))
         action { [:AROP,text] }

      when (text = @ss.scan(/ADD/))
         action { [:AROP,text] }

      when (text = @ss.scan(/SUB/))
         action { [:AROP,text] }

      when (text = @ss.scan(/MULL/))
         action { [:AROP,text] }

      when (text = @ss.scan(/MULH/))
         action { [:AROP,text] }

      when (text = @ss.scan(/MULUL/))
         action { [:AROP,text] }

      when (text = @ss.scan(/MULUH/))
         action { [:AROP,text] }

      when (text = @ss.scan(/DIVL/))
         action { [:AROP,text] }

      when (text = @ss.scan(/DIVH/))
         action { [:AROP,text] }

      when (text = @ss.scan(/DIVUL/))
         action { [:AROP,text] }

      when (text = @ss.scan(/DIVUH/))
         action { [:AROP,text] }

      when (text = @ss.scan(/NEG/))
         action { [:AROP,text] }

      when (text = @ss.scan(/CMP/))
         action { [:AROP,text] }

      when (text = @ss.scan(/ADC/))
         action { [:AROP,text] }

      when (text = @ss.scan(/SBB/))
         action { [:AROP,text] }

      when (text = @ss.scan(/HLT/))
         action { [:HLT,text] }

      when (text = @ss.scan(/\w+/))
         action { [:IDENT,text] }

      else
        text = @ss.string[@ss.pos .. -1]
        raise ScanError, "can not match: '" + text + "'"
      end  # if

    else
      raise ScanError, "undefined state: '" + state.to_s + "'"
    end  # case state
    token
  end  # def _next_token

  def tokenize(code)
    scan_setup(code)
    tokens = []
    while token = next_token
      tokens << token
    end
    tokens
  end

  def make_bytes(val)
    bytes = []
    8.times do |i|
      mask = 0xFF << i*8
      byte = (val & mask) >> i*8
      bytes.push byte
    end
    return bytes
  end
end # class
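
# Usage sketch (not part of the generated file; the sample assembly line is an
# illustrative assumption). `tokenize` exercises only the lexer; `scan_str` and
# `scan_file` additionally require the Racc grammar that supplies `do_parse`
# for this class.
#
#   lexer = T64.new
#   lexer.tokenize("LD r1, 0x10\n")
#   #=> [[:LD, "LD"], [:REG, 1], [:COMMA, ","],
#   #    [:NUM, [16, 0, 0, 0, 0, 0, 0, 0]], [:NEWLINE, "\n"]]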