Ported from a piece of C# code and from a book.
The recognition rate isn't that high...
Also, to keep things fast at run time, the neural network is trained and dumped to disk ahead of time.
And it can only recognize the digits 0~9.
The original code is here: http://blog.linjian.org/articles/bp-mouse-gesture-recognition/
That's about it~
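By "dumped ahead of time" I mean a one-off pass along these lines (a sketch only, using the NetworkFunctions module defined further down; note that initData's training progress bar reads the $原宽度/$原高度 screen-size globals, which this script assumes are set elsewhere):

# One-off pass (e.g. in a throwaway test project) to train the network and dump it:
NetworkFunctions.initData   # builds GestureData + NeuralNet and trains on the ten digit patterns (slow)
NetworkFunctions.save       # writes BPNet.rvdata and BPNetD.rvdata into the game folder

# Normal game start: skip training entirely and read the dump back in:
NetworkFunctions.load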
As for how to use it:
Copy all of the code below, then create a drawing area with
DrawBox.new(x,y,width,height)
This is only a bare-bones version, so I won't explain it in detail... it is enough for ordinary use... if you need more features, adapt it yourself...
NetworkFunctions.initData builds (and trains) the network
NetworkFunctions.save saves the network
NetworkFunctions.load loads a previously saved network
The other constants and odds and ends in module Useful are up to you as well...
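Here is a minimal usage sketch. It assumes the standard RGSS main loop plus a separate Mouse script that supplies the Mouse.click?(1), Mouse.up?(1), Mouse.pos and Mouse.rect? calls DrawBox relies on (that Mouse script is not included here); the 544x416 screen is only an example size:

# After pasting the whole script (its first block already ends by calling NetworkFunctions.load):
box = DrawBox.new(0, 0, 544, 416)   # the rectangle you are allowed to draw in

loop do                             # stand-in for your scene's update loop
  Graphics.update
  Input.update
  box.update                        # hold mouse button 1 to draw; on release the
end                                 # recognized digit's name is printed with p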
- module Useful
- NUM_PATTERNS=10
- NUM_VECTORS=12
- MATCH_TOLERANCE=0.96
- ACTIVATION_RESPONSE=1.0
- BIAS=-1.0
- ERROR_THRESHOLD = 0.003
- LEARNING_RATE = 0.5
- NUM_HIDDEN_NEURONS = 6
- WITH_MOMENTUM = false
- MOMENTUM = 0.9
- WITH_NOISE = false
- MAX_NOISE_TO_ADD = 0.1
- InitNames=["0","1","2","3","4","5","6","7","8","9"]
- InitPatterns=
- [
- [ -0.96, 0.29, -0.71, 0.71, -0.29, 0.96, 0.29, 0.96, 0.71, 0.71, 0.96, 0.29, 0.96, -0.29, 0.71, -0.71, 0.29, -0.96, -0.29, -0.96, -0.71, -0.71, -0.96, -0.29 ],
- [ 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1 ],
- [ 1, 0, 1, 0, 1, 0, 1, 0, -0.71, 0.71, -0.71, 0.71, -0.71, 0.71, -0.71, 0.71, 1, 0, 1, 0, 1, 0, 1, 0 ],
- [ 1, 0, 1, 0, 0.91, 0.41, 0.41, 0.91, -0.41, 0.91, -0.91, 0.41, 0.91, 0.41, 0.41, 0.91, -0.41, 0.91, -0.91, 0.41, -1, 0, -1, 0 ],
- [ -0.32, 0.95, -0.32, 0.95, -0.32, 0.95, -0.32, 0.95, -0.32, 0.95, -0.32, 0.95, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0 ],
- [ -1, 0, -1, 0, -1, 0, 0, 1, 0, 1, 1, 0, 0.91, 0.41, 0.41, 0.91, -0.41, 0.91, -0.91, 0.41, -1, 0, -0.91, -0.41 ],
- [ -0.95, 0.32, -0.71, 0.71, -0.35, 0.94, 0, 1, 0.35, 0.94, 0.71, 0.71, 0.95, 0.32, 0.91, -0.41, 0.41, -0.91, -0.41, -0.91, -0.91, -0.41, -0.91, 0.41 ],
- [ 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1 ],
- [ -0.89, -0.45, -0.89, 0.45, 0, 1, 0.89, 0.45, 0.89, 0.45, 0, 1, -0.89, 0.45, -0.89, -0.45, 0, -1, 0.89, -0.45, 0.89, -0.45, 0, -1 ],
- [ -1, 0, -1, 0, -1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1 ]
- ]
- # random value in [-1, 1] (difference of two coarse uniform draws)
- def self.randomClamped
- a=rand(11)/10.0
- b=rand(11)/10.0
- return a-b
- end
- module RUN_MODE
- LEARNING=1
- ACTIVE=2
- UNREADY=3
- TRAINING=4
- end
- end
- class Neuron
- attr_accessor:numInputs
- attr_accessor:weights
- attr_accessor:prevUpdate
- attr_accessor:activation
- attr_accessor:error
- @numInputs=1
- =begin
- public Neuron(int _NumInputs)
- {
- NumInputs = _NumInputs + 1;
- Activation = 0;
- Error = 0;
- Weights = new List<double>();
- PrevUpdate = new List<double>();
- // generate random weights
- for (int i = 0; i < NumInputs; i++)
- {
- Weights.Add(Useful.RandomClamped());
- PrevUpdate.Add(0.0);
- }
- }
- =end
- def initialize(numInputs)
- @numInputs=numInputs+1
- @activation=0
- @error=0
- @weights=[]
- @prevUpdate=[]
- for i in 0...@numInputs  # @numInputs already includes the extra bias weight
- @weights.push(Useful.randomClamped)
- @prevUpdate.push(0.0)
- end
- end
-
- end
- class NeuronLayer
- attr_accessor:numNeurons
- attr_accessor:neurons
- =begin
- public NeuronLayer(int _NumNeurons, int _NumInputsPerNeuron)
- {
- NumNeurons = _NumNeurons;
- Neurons = new List<Neuron>();
- for (int i = 0; i < NumNeurons; i++)
- {
- Neurons.Add(new Neuron(_NumInputsPerNeuron));
- }
- }
- =end
- def initialize(numNeurons,numInputsPerNeuron)
- @numNeurons=numNeurons
- @neurons=[]
- #p @numNeurons
- for i in 0...@numNeurons
- @neurons.push(Neuron.new(numInputsPerNeuron))
- end
- end
- end
- class NeuralNet
- attr_accessor:numInputs
- attr_accessor:numOutputs
- attr_accessor:numHiddenLayers
- attr_accessor:neuronsPerHiddenLyr
- attr_accessor:learningRate
- attr_accessor:errorSum
- attr_accessor:trained
- attr_accessor:numEpochs
- attr_accessor:layers
- =begin
- private bool NetworkTrainingEpoch(List<List<double>> SetIn, List<List<double>> SetOut)
- {
- if (Useful.WITH_MOMENTUM)
- {
- return NetworkTrainingEpochWithMomentum(SetIn, SetOut);
- }
- else
- {
- return NetworkTrainingEpochNonMomentum(SetIn, SetOut);
- }
- }
- =end
- def networkTrainingEpoch(setIn,setOut)
- # note: only the "momentum" epoch was ported, and it does not actually apply
- # Useful::MOMENTUM - it is plain backpropagation
- #if Useful::WITH_MOMENTUM
- return networkTrainingEpochWithMomentum(setIn, setOut)
- #else
- # return networkTrainingEpochNonMomentum(setIn, setOut)
- # end
- end
- def networkTrainingEpochWithMomentum(setIn,setOut)
- @errorSum=0.0
- for vec in 0...setIn.size
- outputs=update(setIn[vec])
- if outputs.size==0
- p "Bad inputs"
- return false
- end
- for op in 0...@numOutputs
- err=(setOut[vec][op]-outputs[op])*outputs[op]*(1.0-outputs[op])
- @layers[1].neurons[op].error=err
- @errorSum+=(setOut[vec][op] - outputs[op]) * (setOut[vec][op] - outputs[op])
- curWeight=0
- curNrnHid=0
- while (curWeight < @layers[1].neurons[op].weights.size - 1)
- @layers[1].neurons[op].weights[curWeight] += err * @learningRate * @layers[0].neurons[curNrnHid].activation
-
- curWeight+=1
- curNrnHid+=1
-
- end
- @layers[1].neurons[op].weights[curWeight] += err * @learningRate * Useful::BIAS
-
- end
- curNrnHid=0
- n=0
- while (curNrnHid < @layers[0].neurons.size)
- err=0.0
- curNrnOut=0   # integer index into the output layer's neurons
- while (curNrnOut < @layers[1].neurons.size)
- err+=@layers[1].neurons[curNrnOut].error * @layers[1].neurons[curNrnOut].weights[n]
- curNrnOut+=1
- end
- err*=@layers[0].neurons[curNrnHid].activation * (1.0 - @layers[0].neurons[curNrnHid].activation)
- for w in 0...@numInputs
- @layers[0].neurons[curNrnHid].weights[w] += err * @learningRate * setIn[vec][w]
- end
- @layers[0].neurons[curNrnHid].weights[@numInputs] += err * @learningRate * Useful::BIAS
- curNrnHid+=1
- n+=1
- end
- end
- return true
- end
- =begin
- private void CreateNet()
- {
- if (NumHiddenLayers > 0)
- {
- // hidden layer
- Layers.Add(new NeuronLayer(NeuronsPerHiddenLyr, NumInputs));
- for (int i = 0; i < NumHiddenLayers - 1; i++)
- {
- Layers.Add(new NeuronLayer(NeuronsPerHiddenLyr, NeuronsPerHiddenLyr));
- }
- // output layer
- Layers.Add(new NeuronLayer(NumOutputs, NeuronsPerHiddenLyr));
- }
- else
- {
- // output layer
- Layers.Add(new NeuronLayer(NumOutputs, NumInputs));
- }
- }
- =end
- def createNet
- if @numHiddenLayers>0
- #p @neuronsPerHiddenLyr
- @layers.push(NeuronLayer.new(@neuronsPerHiddenLyr, @numInputs))
- for i in 0...@numHiddenLayers - 1
- @layers.push(NeuronLayer.new(@neuronsPerHiddenLyr, @neuronsPerHiddenLyr))
- end
- @layers.push(NeuronLayer.new(@numOutputs, @neuronsPerHiddenLyr))
- else
- @layers.push(NeuronLayer.new(@numOutputs, @numInputs))
- end
- end
- =begin
- private void InitializeNetwork()
- {
- // for each layer
- for (int i = 0; i < NumHiddenLayers + 1; i++)
- {
- // for each neuron
- for (int n = 0; n < Layers[i].NumNeurons; n++)
- {
- // for each weight
- for (int k = 0; k < Layers[i].Neurons[n].NumInputs; k++)
- {
- Layers[i].Neurons[n].Weights[k] = Useful.RandomClamped();
- }
- }
- }
- ErrorSum = 9999;
- NumEpochs = 0;
- }
-
- =end
- def initializeNetwork
- for i in 0...@numHiddenLayers+1
- for n in 0...@layers[i].numNeurons #?
- for k in 0...@layers[i].neurons[n].numInputs
- @layers[i].neurons[n].weights[k] = Useful.randomClamped
- end
- end
- end
- @errorSum=9999
- @numEpochs=0
- end
- =begin
- private double Sigmoid(double activation, double response)
- {
- return (1.0 / (1.0 + Math.Exp(- activation / response)));
- }
- =end
- def sigmoid(activation,response)
- return (1.0/(1.0 + Math.exp(- activation / response)))
- end
- =begin
- public NeuralNet(int _NumInputs, int _NumOutputs, int _HiddenNeurons, double _LearningRate)
- {
- NumInputs = _NumInputs;
- NumOutputs = _NumOutputs;
- NumHiddenLayers = 1;
- NeuronsPerHiddenLyr = _HiddenNeurons;
- LearningRate = _LearningRate;
- ErrorSum = 9999;
- Trained = false;
- NumEpochs = 0;
- Layers = new List<NeuronLayer>();
- CreateNet();
- }
- =end
- def initialize(numInputs,numOutputs,hiddenNeurons,learningRate)
- @numInputs=numInputs
- @numOutputs=numOutputs
- @neuronsPerHiddenLyr=hiddenNeurons
- @learningRate=learningRate
- @numHiddenLayers=1
- @errorSum=9999
- @trained=false
- @numEpochs=0
- @layers=[]
- createNet
- end
- =begin
- public List<double> Update(List<double> _inputs)
- {
- List<double> inputs = new List<double>(_inputs);
- List<double> outputs = new List<double>();
- int cWeight = 0;
- // validate the input length
- if (inputs.Count != NumInputs)
- {
- return outputs;
- }
- // for each layer
- for (int i = 0; i < NumHiddenLayers + 1; i++)
- {
- if (i > 0)
- {
- inputs = new List<double>(outputs);
- }
- outputs.Clear();
- cWeight = 0;
- =end
- def update(inputs1)
- inputs=inputs1.clone
- outputs=[]
- cWeight=0
- if inputs.size!=@numInputs
- return outputs
- end
- for i in 0...@numHiddenLayers+1
- if (i>0)
- inputs = outputs.clone
- end
- outputs.clear
- cWeight=0
-
- =begin
- // for each neuron
- for (int n = 0; n < Layers[i].NumNeurons; n++)
- {
- double netinput = 0;
- int num = Layers[i].Neurons[n].NumInputs;
- // for each weight
- for (int k = 0; k < num - 1; k++)
- {
- netinput += Layers[i].Neurons[n].Weights[k] * inputs[cWeight++];
- }
- netinput += Layers[i].Neurons[n].Weights[num - 1] * Useful.BIAS;
- Layers[i].Neurons[n].Activation = Sigmoid(netinput, Useful.ACTIVATION_RESPONSE);
- outputs.Add(Layers[i].Neurons[n].Activation);
- cWeight = 0;
- }
- }
- return outputs;
- }
- =end
- for n in 0...@layers[i].numNeurons
- netinput=0.0
- num=@layers[i].neurons[n].numInputs
- for k in 0...num-1
- netinput+=@layers[i].neurons[n].weights[k] * inputs[cWeight]
- cWeight+=1
- end
- netinput += @layers[i].neurons[n].weights[num - 1] * Useful::BIAS
- @layers[i].neurons[n].activation = sigmoid(netinput, Useful::ACTIVATION_RESPONSE)
- outputs.push(@layers[i].neurons[n].activation)
- cWeight=0
- end
- end
- return outputs
- end
- =begin
- if ((SetIn.Count != SetOut.Count) || (SetIn[0].Count != NumInputs) || (SetOut[0].Count != NumOutputs))
- {
- throw new Exception("训练集输入输出不符!");
- }
- InitializeNetwork();
- // train until the error falls below the threshold
- while (ErrorSum > Useful.ERROR_THRESHOLD)
- {
- // one training epoch
- if (!NetworkTrainingEpoch(SetIn, SetOut))
- {
- return false;
- }
- NumEpochs++;
- // refresh the form
- SendMessage(NumEpochs, ErrorSum);
- }
- Trained = true;
- return true;
- =end
- def train(data)
- @v=Viewport.new(0,0,$原宽度,$原高度) # $原宽度/$原高度: screen width/height globals defined elsewhere in the project
- @v.z=99999999
- @ss=Sprite.new
- @ss.z=99999999
- @ss.bitmap=Bitmap.new($原宽度,16)
- @ss.bitmap.fill_rect(0,0,$原宽度,16,Color.new(255,255,255))
- setIn=data.setIn.clone
- setOut=data.setOut.clone
- if ((setIn.size != setOut.size) or (setIn[0].size != @numInputs) or (setOut[0].size != @numOutputs))
- print "训练集输入输出不符!"
- exit(250)
- end
- initializeNetwork
- t=0
- while (@errorSum>Useful::ERROR_THRESHOLD)
- if (!networkTrainingEpoch(setIn, setOut))
- p "Bad Runnning"
- return false
- end
- @numEpochs+=1
- val=Useful::ERROR_THRESHOLD / @errorSum * $原宽度
- @ss.bitmap.fill_rect(0,0,[[val,$原宽度].min,0].max.to_i,16,Color.new(0,0,255))
- Graphics.update if (t % 100)==0
- t+=1
- end
- @v.dispose
- @ss.bitmap.dispose
- @ss.dispose
- @v=nil
- @ss=nil
- Graphics.update
- Graphics.update
- end
- end
- class GestureData
-
- attr_accessor:names
- attr_accessor:patterns
- attr_accessor:patternNumber
- attr_accessor:patternSize
- attr_accessor:setIn
- attr_accessor:setOut
- def init
-
- =begin
- for (int j = 0; j < PatternNumber; j++)
- {
- List<double> temp = new List<double>();
- for (int v = 0; v < PatternSize * 2; v++)
- {
- temp.Add(Useful.InitPatterns[j][v]);
- }
- Patterns.Add(temp);
- Names.Add(Useful.InitNames[j]);
- }
-
- =end
- for j in 0...@patternNumber
- temp=[]
- for v in 0...@patternSize*2
- temp.push(Useful::InitPatterns[j][v])
- end
- @patterns.push(temp)
- @names.push(Useful::InitNames[j])
- end
-
- end
- =begin
- public GestureData(int _PatternNumber, int _PatternSize)
- {
- Names = new List<string>();
- Patterns = new List<List<double>>();
- SetIn = new List<List<double>>();
- SetOut = new List<List<double>>();
- PatternNumber = _PatternNumber;
- PatternSize = _PatternSize;
- Init();
- CreateTrainingSet();
- }
- =end
- def initialize(patternNumber, patternSize)
- @names=[]
- @patterns=[]
- @setIn=[]
- @setOut=[]
- @patternNumber=patternNumber
- @patternSize=patternSize
- init
- createTrainingSet
- end
- =begin
- public string PatternName(int index)
- {
- if (Names[index] != null)
- {
- return Names[index];
- }
- else
- {
- return "";
- }
- }
- =end
- def patternName(index)
- if @names[index]!=nil
- return @names[index]
- else
- return ""
- end
- end
- =begin
- public bool AddPattern(List<double> _Pattern, string _Name)
- {
- // check the gesture vector length
- if (_Pattern.Count != PatternSize * 2)
- {
- throw new Exception("手势向量长度错误!");
- }
- Names.Add(_Name);
- Patterns.Add(new List<double>(_Pattern));
- PatternNumber++;
- CreateTrainingSet();
- return true;
- }
-
- =end
- def addPattern(pattern, name)
- if pattern.size!=@patternSize*2
- print "手势向量长度错误!"
- exit
- end
- @names.push(name)
- @patterns.push(pattern.clone)
- @patternNumber+=1
- createTrainingSet
- return true
- end
- =begin
- public void CreateTrainingSet()
- {
- // clear the training set
- SetIn.Clear();
- SetOut.Clear();
- // for each gesture
- for (int j = 0; j < PatternNumber; j++)
- {
- SetIn.Add(Patterns[j]);
- // the matching output is 1, all others are 0
- List<double> outputs = new List<double>();
- for (int i = 0; i < PatternNumber; i++)
- {
- outputs.Add(0);
- }
- outputs[j] = 1;
- SetOut.Add(outputs);
- }
- }
- =end
- def createTrainingSet
- setIn.clear
- setOut.clear
- for j in 0...@patternNumber
- setIn.push(@patterns[j])
- outputs=[]
- for i in 0...@patternNumber
- outputs.push(0)
- end
- outputs[j]=1
- setOut.push(outputs)
- end
- end
- end
- =begin
- private void RenewNetwork()
- {
- Useful.LEARNING_RATE = (double)(txtLearning.Value);
- Useful.ERROR_THRESHOLD = (double)(txtThreshold.Value);
- Useful.NUM_HIDDEN_NEURONS = (int)(txtHidden.Value);
- Useful.WITH_MOMENTUM = chkMomentum.Checked;
- Useful.MOMENTUM = (double)(txtMomentum.Value);
- Useful.WITH_NOISE = chkNoise.Checked;
- Useful.MAX_NOISE_TO_ADD = (double)(txtNoise.Value);
- net = new NeuralNet(Useful.NUM_VECTORS * 2, NumValidPatterns, Useful.NUM_HIDDEN_NEURONS, Useful.LEARNING_RATE);
- net.SendMessage += new NeuralNet.DelegateOfSendMessage(ShowMessage);
- txtState.Text = "Training";
- TrainNetwork();
- txtState.Text = "Ready";
- }
- private bool TrainNetwork()
- {
- Mode = RUN_MODE.TRAINING;
- if (!(net.Train(data)))
- {
- return false;
- }
- Mode = RUN_MODE.ACTIVE;
- return true;
- }
- =end
- module NetworkFunctions
- =begin
- //gesture data object
- private GestureData data;
- //neural network object
- private NeuralNet net;
- //number of gestures
- private int NumValidPatterns;
- //number of mouse points to keep after smoothing
- private int NumSmoothPoints;
- //raw gesture path from the user's mouse input
- private List<Point> RawPath;
- //smoothed gesture path
- private List<Point> SmoothPath;
- //vectors to be matched
- private List<double> Vectors;
- //highest network output (closest match)
- private double HighestOutput;
- //gesture corresponding to the highest output
- private int BestMatch;
- //the matched gesture
- private int Match;
- //current run mode of the program
- private RUN_MODE Mode;
- =end
- attr_accessor:net
- attr_accessor:data
- attr_accessor:mode
- attr_accessor:numValidPatterns
- attr_accessor:numSmoothPoints
- attr_accessor:rawPath
- attr_accessor:smoothPath
- attr_accessor:vectors
- attr_accessor:highestOutput
- attr_accessor:bestMatch
- attr_accessor:match
- def self.net
- return @net
- end
- def self.data
- return @data
- end
- def self.numValidPatterns
- return @numValidPatterns
- end
- def self.numSmoothPoints
- return @numSmoothPoints
- end
- def self.rawPath
- return @rawPath
- end
- def self.smoothPath
- return @smoothPath
- end
- def self.vectors
- return @vectors
- end
- def self.highestOutput
- return @highestOutput
- end
- def self.bestMatch
- return @bestMatch
- end
- def self.match
- return @match
- end
-
- def self.net=(v)
- return @net=v
- end
- def self.data=(v)
- return @data=v
- end
- def self.numValidPatterns=(v)
- return @numValidPatterns=v
- end
- def self.numSmoothPoints=(v)
- return @numSmoothPoints=v
- end
- def self.rawPath=(v)
- return @rawPath=v
- end
- def self.smoothPath=(v)
- return @smoothPath=v
- end
- def self.vectors=(v)
- return @vectors=v
- end
- def self.highestOutput=(v)
- return @highestOutput=v
- end
- def self.bestMatch=(v)
- return @bestMatch=v
- end
- def self.match=(v)
- return @match=v
- end
-
- def self.renewNetwork
- @net=NeuralNet.new(Useful::NUM_VECTORS * 2, @numValidPatterns, Useful::NUM_HIDDEN_NEURONS, Useful::LEARNING_RATE)
- trainNetwork
- return true
- end
- def self.trainNetwork
- @mode=Useful::RUN_MODE::TRAINING
- if !(@net.train(@data))
- return false
- end
- @mode=Useful::RUN_MODE::ACTIVE
- end
- def self.mode
- return @mode
- end
- def self.mode=(v)
- return @mode=v
- end
- def self.initData
- @mode=Useful::RUN_MODE::UNREADY
- @numValidPatterns=Useful::NUM_PATTERNS
- @numSmoothPoints=Useful::NUM_VECTORS + 1
- @highestOutput=0.0
- @bestMatch=-1
- @match=-1
- @rawPath=[]
- @smoothPath=[]
- @vectors=[]
- @data=GestureData.new(@numValidPatterns, Useful::NUM_VECTORS)
- @net=NeuralNet.new(Useful::NUM_VECTORS * 2, @numValidPatterns, Useful::NUM_HIDDEN_NEURONS, Useful::LEARNING_RATE)
- trainNetwork
- end
- def self.save
- save_data(@net,"BPNet.rvdata")
- save_data(@data,"BPNetD.rvdata")
- end
- def self.load
- @net = load_data("BPNet.rvdata")
- @data= load_data("BPNetD.rvdata")
- @mode=Useful::RUN_MODE::ACTIVE
- @numValidPatterns=Useful::NUM_PATTERNS
- @numSmoothPoints=Useful::NUM_VECTORS + 1
- @highestOutput=0.0
- @bestMatch=-1
- @match=-1
- @rawPath=[]
- @smoothPath=[]
- @vectors=[]
- end
- end
- =begin
- private void InitData()
- {
- Mode = RUN_MODE.UNREADY;
- NumValidPatterns = Useful.NUM_PATTERNS;
- NumSmoothPoints = Useful.NUM_VECTORS + 1;
- HighestOutput = 0.0;
- BestMatch = -1;
- Match = -1;
- RawPath = new List<Point>();
- SmoothPath = new List<Point>();
- Vectors = new List<double>();
- data = new GestureData(NumValidPatterns, Useful.NUM_VECTORS);
- net = new NeuralNet(Useful.NUM_VECTORS * 2, NumValidPatterns, Useful.NUM_HIDDEN_NEURONS, Useful.LEARNING_RATE);
- net.SendMessage += new NeuralNet.DelegateOfSendMessage(ShowMessage);
- }
- =end
- #NetworkFunctions.initData
- NetworkFunctions.load
- #NetworkFunctions.load
- class Bitmap
- #--------------------------------------------------------------------------
- # ● Draw a straight line
- # x1,y1,x2,y2: coordinates of the line's two endpoints
- # width: line width
- # color: color
- #--------------------------------------------------------------------------
- def drawline(x1, y1, x2, y2, width, color)
- x1 = x1.to_f
- y1 = y1.to_f
- x2 = x2.to_f
- y2 = y2.to_f
- width = width.to_f
- k = (y2 - y1) / (x2 - x1)
- if k.abs > 1
- drawline_x(x1, y1, x2, y2, width, color)
- else
- drawline_y(x1, y1, x2, y2, width, color)
- end
- end
- def drawline_x(x1, y1, x2, y2, width, color)
- l = ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5 * width / (y1 - y2)
- length = l.abs * 2
- k = (x2 - x1) / (y2 - y1) #x=ky+b
- b = x1 - k * y1
- if l > 0
- for ty in y2.to_i..y1.to_i
- tx = ty * k + b
- fill_rect(tx - l, ty, length, 1, color)
- end
- else
- for ty in y1.to_i..y2.to_i
- tx = ty * k + b
- fill_rect(tx + l, ty, length, 1, color)
- end
- end
- end
- def drawline_y(x1, y1, x2, y2, width, color)
- l = ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5 * width / (x1 - x2)
- height = l.abs * 2
- k = (y2 - y1) / (x2 - x1) #y=kx+b
- b = y1 - k * x1
- if l > 0
- for tx in x2.to_i..x1.to_i
- ty = tx * k + b
- fill_rect(tx, ty - l, 1, height, color)
- end
- else
- for tx in x1.to_i..x2.to_i
- ty = tx * k + b
- fill_rect(tx, ty + l, 1, height, color)
- end
- end
- end
- end
- class DrawBox
- include NetworkFunctions
- def initialize(x,y,width,height)
- @draw_sprite=Sprite.new
- @draw_sprite.x=x
- @draw_sprite.y=y
- @draw_sprite.z=999999999
- @draw_sprite.bitmap=Bitmap.new(width,height)
- NetworkFunctions.rawPath=[]
- @draw_area=Rect.new(x,y,width,height)
- @in_drawing=false
- end
- def update
- return if not Mouse.rect?(@draw_area)
- if Mouse.up?(1)
- # mouse_up
- @in_drawing=false
- @draw_sprite.bitmap.clear
- startMatch
-
- end
- if Mouse.click?(1)
- # mouse_down
- @draw_sprite.bitmap.clear
- @in_drawing=true
- @ox,@oy=Mouse.pos
- NetworkFunctions.rawPath.clear
- NetworkFunctions.smoothPath.clear
- NetworkFunctions.vectors.clear
- NetworkFunctions.rawPath.push(Mouse.pos)
- end
- if Mouse.pos!=[@ox,@oy] and @in_drawing
- @draw_sprite.bitmap.drawline(@ox,@oy, Mouse.pos[0],Mouse.pos[1],3,Color.new(255,255,255,88))
- NetworkFunctions.rawPath.push(Mouse.pos)
- @ox,@oy=Mouse.pos
- end
- end
- def startMatch
- if smooth
- createVectors
- if (NetworkFunctions.mode == Useful::RUN_MODE::ACTIVE)
- if not testForMatch
-
- return false
- else
- return showResult
- end
- else
- #p "bye"
- end
- end
- end
- def showResult
- p NetworkFunctions.data.patternName(NetworkFunctions.bestMatch)
- end
- =begin
- private bool TestForMatch()
- {
- List<double> outputs = net.Update(Vectors);
- if (outputs.Count == 0)
- {
- return false;
- }
- HighestOutput = 0;
- BestMatch = 0;
- Match = -1;
- for (int i = 0; i < outputs.Count; i++)
- {
- if (outputs[i] > HighestOutput)
- {
- // record the closest match so far
- HighestOutput = outputs[i];
- BestMatch = i;
- // confident enough that this is the gesture
- if (HighestOutput > Useful.MATCH_TOLERANCE)
- {
- Match = BestMatch;
- }
- }
- }
- return true;
- }
- =end
- def testForMatch
- outputs=NetworkFunctions.net.update(NetworkFunctions.vectors)
- if outputs.size==0
- return false
- end
- NetworkFunctions.highestOutput=0
- NetworkFunctions.bestMatch=0
- NetworkFunctions.match=-1
- for i in 0...outputs.size
- if outputs[i]>NetworkFunctions.highestOutput
- NetworkFunctions.highestOutput=outputs[i]
- NetworkFunctions.bestMatch=i
- if (NetworkFunctions.highestOutput>Useful::MATCH_TOLERANCE)
- NetworkFunctions.match=NetworkFunctions.bestMatch
- end
- end
- end
- return true
- end
- =begin
- private bool Smooth()
- {
- // make sure there are enough points for the calculation
- if (RawPath.Count < NumSmoothPoints)
- {
- return false;
- }
- SmoothPath = new List<Point>(RawPath);
- // repeatedly replace the closest pair of points with their midpoint
- while (SmoothPath.Count > NumSmoothPoints)
- {
- double ShortestSoFar = double.MaxValue;
- int PointMarker = 0;
- // find the shortest span
- for (int SpanFront = 2; SpanFront < SmoothPath.Count - 1; SpanFront++)
- {
- // distance between the pair of points
- double length = Math.Sqrt((double)
- ((SmoothPath[SpanFront - 1].X - SmoothPath[SpanFront].X) *
- (SmoothPath[SpanFront - 1].X - SmoothPath[SpanFront].X) +
- (SmoothPath[SpanFront - 1].Y - SmoothPath[SpanFront].Y) *
- (SmoothPath[SpanFront - 1].Y - SmoothPath[SpanFront].Y)));
- if (length < ShortestSoFar)
- {
- ShortestSoFar = length;
- PointMarker = SpanFront;
- }
- }
- // insert the midpoint and remove the original point
- Point newPoint = new Point();
- newPoint.X = (SmoothPath[PointMarker - 1].X + SmoothPath[PointMarker].X) / 2;
- newPoint.Y = (SmoothPath[PointMarker - 1].Y + SmoothPath[PointMarker].Y) / 2;
- SmoothPath[PointMarker - 1] = newPoint;
- SmoothPath.RemoveAt(PointMarker);
- }
- return true;
- }
- =end
- def smooth
- if NetworkFunctions.rawPath.size<NetworkFunctions.numSmoothPoints
- return false
- end
- NetworkFunctions.smoothPath=NetworkFunctions.rawPath.clone
- while (NetworkFunctions.smoothPath.size>NetworkFunctions.numSmoothPoints)
- shortestSoFar=Float::MAX
- pointMarker=0
- for spanFront in 2...NetworkFunctions.smoothPath.size-1
- length=Math.sqrt(((NetworkFunctions.smoothPath[spanFront - 1][0] - NetworkFunctions.smoothPath[spanFront][0]) *
- (NetworkFunctions.smoothPath[spanFront - 1][0] - NetworkFunctions.smoothPath[spanFront][0]) +
- (NetworkFunctions.smoothPath[spanFront - 1][1] - NetworkFunctions.smoothPath[spanFront][1]) *
- (NetworkFunctions.smoothPath[spanFront - 1][1] - NetworkFunctions.smoothPath[spanFront][1])))
- if length<shortestSoFar
- shortestSoFar=length
- pointMarker=spanFront
- end
- end
- newPoint=[]
- newPoint[0]=(NetworkFunctions.smoothPath[pointMarker - 1][0] + NetworkFunctions.smoothPath[pointMarker][0]) / 2.0
- newPoint[1]=(NetworkFunctions.smoothPath[pointMarker - 1][1] + NetworkFunctions.smoothPath[pointMarker][1]) / 2.0
- NetworkFunctions.smoothPath[pointMarker-1]=newPoint
- NetworkFunctions.smoothPath.delete_at(pointMarker)
- end
- return true
- end
- =begin
- for (int p = 1; p < SmoothPath.Count; ++p)
- {
- double x = (double)(SmoothPath[p].X - SmoothPath[p - 1].X);
- double y = (double)(SmoothPath[p].Y - SmoothPath[p - 1].Y);
- double len = Math.Sqrt((double)(x * x + y * y));
- Vectors.Add(x / len);
- Vectors.Add(y / len);
- }
- =end
- def createVectors
- for p in 1...NetworkFunctions.smoothPath.size
- x=(NetworkFunctions.smoothPath[p][0] - NetworkFunctions.smoothPath[p - 1][0]).to_f
- y=(NetworkFunctions.smoothPath[p][1] - NetworkFunctions.smoothPath[p - 1][1]).to_f
- len=Math.sqrt(x*x+y*y).to_f
- NetworkFunctions.vectors.push(x/len)
- NetworkFunctions.vectors.push(y/len)
- end
- end
- end
-
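If you want to teach it an extra gesture beyond the ten digits, something along these lines should work; this is only a sketch (the stroke vector and the name "right" are made-up examples), and retraining takes about as long as the original dump did:

# Hypothetical example: add an 11th gesture (a straight left-to-right stroke) and retrain.
# A pattern is NUM_VECTORS (12) unit direction vectors, i.e. 24 numbers.
stroke_right = [1.0, 0.0] * Useful::NUM_VECTORS
NetworkFunctions.data.addPattern(stroke_right, "right")
NetworkFunctions.numValidPatterns += 1   # keep the output-neuron count in step
NetworkFunctions.renewNetwork            # rebuilds the net and retrains it
NetworkFunctions.save                    # dump the enlarged network for later loads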
Put the gesture/network files in the game's root directory (the path can be changed).
Net.rar
(4.52 KB, 248 downloads)