@inproceedings{1103641,
author = {David J. Malan and Michael D. Smith},
title = {Host-based detection of worms through peer-to-peer cooperation},
booktitle = {Proceedings of the 2005 ACM Workshop on Rapid Malcode (WORM)},
year = {2005},
month = nov,
pages = {72--80},
doi = {10.1145/1103626.1103641},
abstract = {We propose a host-based, runtime defense against worms that achieves negligible risk of false positives through peer-to-peer cooperation. We view correlation among otherwise independent peers' behavior as anomalous behavior, indication of a fast-spreading worm. We detect correlation by exploiting worms' \textit{temporal consistency}, similarity (low temporal variance) in worms' invocations of system calls. We evaluate our ideas on Windows XP with Service Pack 2 using traces of nine variants of worms and twenty-five non-worms, including ten commercial applications and fifteen processes native to the platform. We find that two peers, upon exchanging snapshots of their internal behavior, defined with frequency distributions of system calls, can decide that they are, more likely than not, executing a worm between 76\% and 97\% of the time. More importantly, we find that the probability that peers might err, judging a non-worm a worm, is negligible.}
}

@inproceedings{dash06gossip,
author = {Denver Dash and Branislav Kveton and John Mark Agosta and Eve Schooler and Jaideep Chandrashekar and Abraham Bachrach and Alex Newman},
title = {When Gossip is Good: Distributed Probabilistic Inference for Detection of Slow Network Intrusions},
booktitle = {Proceedings of the 21st National Conference on Artificial Intelligence},
year = {2006},
month = jul,
pages = {1115--1122},
pdf = {http://www.cs.pitt.edu/~bkveton/docs/aaai2006b.pdf},
abstract = {Intrusion attempts due to self-propagating code are becoming an increasingly urgent problem, in part due to the homogeneous makeup of the internet. Recent advances in anomaly-based intrusion detection systems (IDSs) have made use of the quickly spreading nature of these attacks to identify them with high sensitivity and at low false positive (FP) rates. However, slowly propagating attacks are much more difficult to detect because they are cloaked under the veil of normal network traffic, yet can be just as dangerous due to their exponential spread pattern. We extend the idea of using collaborative IDSs to corroborate the likelihood of attack by imbuing end hosts with probabilistic graphical models and using random messaging to gossip state among peer detectors. We show that such a system is able to boost a weak anomaly detector D to detect an order-of-magnitude slower worm, at false positive rates less than a few per week, than would be possible using D alone at the end-host or on a network aggregation point. We show that this general architecture is scalable in the sense that a fixed absolute false positive rate can be achieved as the network size grows, spreads communication bandwidth uniformly throughout the network, and makes use of the increased computation power of a distributed system. We argue that using probabilistic models provides more robust detections than previous collaborative counting schemes and allows the system to account for heterogeneous detectors in a principled fashion.}
}

@inproceedings{1179548,
author = {David J. Malan and Michael D. Smith},
title = {Exploiting temporal consistency to reduce false positives in host-based, collaborative detection of worms},
booktitle = {Proceedings of the 4th ACM Workshop on Recurring Malcode (WORM)},
year = {2006},
pages = {25--32},
doi = {10.1145/1179542.1179548},
abstract = {The speed of today's worms demands automated detection, but the risk of false positives poses a difficult problem. In prior work, we proposed a host-based intrusion-detection system for worms that leveraged collaboration among peers to lower its risk of false positives, and we simulated this approach for a system with two peers. In this paper, we build upon that work and evaluate our ideas ``in the wild.'' We implement Wormboy 2.0, a prototype of our vision that allows us to quantify and compare worms' and non-worms' temporal consistency, similarity over time in worms' and non-worms' invocations of system calls. We deploy our prototype to a network of 30 hosts running Windows XP with Service Pack 2 to monitor and analyze 10,776 processes, inclusive of 511 unique non-worms (873 if we consider unique versions to be unique non-worms). We identify properties with which we can distinguish non-worms from worms 99\% of the time. We find that our collaborative architecture, using patterns of system calls and simple heuristics, can detect worms running on multiple peers. And we find that collaboration among peers significantly reduces our probability of false positives because of the unlikely appearance on many peers simultaneously of non-worm processes with worm-like properties.}
}

@inproceedings{Stafford06,
  author    = {Shad Stafford and Jun Li and Toby Ehrenkranz},
  title     = {On the Performance of {SWORD} in Detecting Zero-Day-Worm-Infected Hosts},
  booktitle = {Proceedings of the International Symposium on Performance Evaluation of Computer and Telecommunication Systems (SPECTS)},
  year      = {2006},
  pdf       = {http://netsec.cs.uoregon.edu/pubs/SPECT06-sword-final.pdf}
}

@inproceedings{BinkleySingh2006,
author = {James R. Binkley and Suresh Singh},
title = {An Algorithm for Anomaly-based Botnet Detection},
booktitle = {Proceedings of USENIX Steps to Reducing Unwanted Traffic on the Internet Workshop (SRUTI)},
pages = {43--48},
year = {2006},
month = jul,
http = {http://www.usenix.org/events/sruti06/tech/binkley.html},
abstract = {We present an anomaly-based algorithm for detecting IRC-based botnet meshes. The algorithm combines an IRC mesh detection component with a TCP scan detection heuristic called the TCP work weight. The IRC component produces two tuples, one for determining the IRC mesh based on IP channel names, and a sub-tuple which collects statistics (including the TCP work weight) on individual IRC hosts in channels. We sort the channels by the number of scanners producing a sorted list of potential botnets. This algorithm has been deployed in PSU's DMZ for over a year and has proven effective in reducing the number of botnet clients.}
}

@inproceedings{Cooke2005,
author = {Evan Cooke and Farnam Jahanian and Danny McPherson},
title = {The Zombie Roundup: Understanding, Detecting, and Disrupting Botnets},
booktitle = {Proceedings of USENIX Steps to Reducing Unwanted Traffic on the Internet Workshop (SRUTI)},
pages = {39--44},
year = {2005},
month = jul,
http = {http://www.usenix.org/events/sruti05/tech/cooke.html},
abstract = {Global Internet threats are undergoing a profound transformation from attacks designed solely to disable infrastructure to those that also target people and organizations. Behind these new attacks is a large pool of compromised hosts sitting in homes, schools, businesses, and governments around the world. These systems are infected with a bot that communicates with a bot controller and other bots to form what is commonly referred to as a zombie army or botnet. Botnets are a very real and quickly evolving problem that is still not well understood or studied. In this paper we outline the origins and structure of bots and botnets and use data from the operator community, the Internet Motion Sensor project, and a honeypot experiment to illustrate the botnet problem today. We then study the effectiveness of detecting botnets by directly monitoring IRC communication or other command and control activity and show a more comprehensive approach is required. We conclude by describing a system to detect botnets that utilize advanced command and control systems by correlating secondary detection data from multiple sources.}
}

@inproceedings{1162667,
author = {Janak J. Parekh and Ke Wang and Salvatore J. Stolfo},
title = {Privacy-preserving payload-based correlation for accurate malicious traffic detection},
booktitle = {Proceedings of the SIGCOMM Workshop on Large-Scale Attack Defense (LSAD)},
year = {2006},
pages = {99--106},
doi = {10.1145/1162666.1162667}
}

@inproceedings{ZouCunningham2006,
author = {Cliff C. Zou and Ryan Cunningham},
title = {Honeypot-Aware Advanced Botnet Construction and Maintenance},
booktitle = {Proceedings of the International Conference on Dependable Systems and Networks (DSN)},
year = {2006},
month = jun,
pages = {199--208},
pdf = {http://www.cs.ucf.edu/~czou/research/honeypot-DSN06.pdf},
doi = {10.1109/DSN.2006.38},
abstract = {Because ``botnets'' can be used for illicit financial gain, they have become quite popular in recent Internet attacks. ``Honeypots'' have been successfully deployed in many defense systems. Thus, attackers constructing and maintaining botnets will be forced to find ways to avoid honeypot traps. In this paper, we present a hardware and software independent honeypot detection methodology based on the following assumption: security professionals deploying honeypots have liability constraints such that they cannot allow their honeypots to participate in real (or too many real) attacks. Based on this assumption, attackers can detect honeypots in their botnet by checking whether the compromised machines in the botnet can successfully send out unmodified malicious traffic to attackers' sensors or whether the bot controller in their botnet can successfully relay potential attack commands. In addition, we present a novel ``two-stage reconnaissance'' worm that can automatically construct a peer-to-peer structured botnet and detect and remove infected honeypots during its propagation stage. Finally, we discuss some guidelines for defending against the general honeypot-aware attacks.}
}

@inproceedings{Freiling+05,
author = {Felix C. Freiling and Thorsten Holz and Georg Wicherski},
title = {Botnet Tracking: Exploring a Root-Cause Methodology to Prevent Distributed Denial-of-Service Attacks},
booktitle = {Proceedings of the 10th European Symposium on Research in Computer Security (ESORICS)},
year = {2005},
month = sep,
series = {Lecture Notes in Computer Science},
volume = {3679},
pages = {319--335},
doi = {10.1007/11555827_19},
abstract = {Denial-of-Service (DoS) attacks pose a significant threat to the Internet today especially if they are distributed, i.e., launched simultaneously at a large number of systems. \textit{Reactive} techniques that try to detect such an attack and throttle down malicious traffic prevail today but usually require an additional infrastructure to be really effective. In this paper we show that \textit{preventive} mechanisms can be as effective with much less effort: We present an approach to (distributed) DoS attack prevention that is based on the observation that coordinated automated activity by many hosts needs a mechanism to remotely control them. To prevent such attacks, it is therefore possible to identify, infiltrate and analyze this remote control mechanism and to stop it in an automated fashion. We show that this method can be realized in the Internet by describing how we infiltrated and tracked IRC-based \textit{botnets} which are the main DoS technology used by attackers today.}
}

@inproceedings{1177086,
author = {Moheeb Abu Rajab and Jay Zarfoss and Fabian Monrose and Andreas Terzis},
title = {A multifaceted approach to understanding the botnet phenomenon},
booktitle = {Proceedings of the 6th ACM SIGCOMM on Internet Measurement (IMC)},
year = {2006},
pages = {41--52},
doi = {10.1145/1177080.1177086},
abstract = {The academic community has long acknowledged the existence of malicious botnets, however to date, very little is known about the behavior of these distributed computing platforms. To the best of our knowledge, botnet behavior has never been methodically studied, botnet prevalence on the Internet is mostly a mystery, and the botnet life cycle has yet to be modeled. Uncertainty abounds. In this paper, we attempt to clear the fog surrounding botnets by constructing a multifaceted and distributed measurement infrastructure. Throughout a period of more than three months, we used this infrastructure to track 192 unique IRC botnets of size ranging from a few hundred to several thousand infected end-hosts. Our results show that botnets represent a major contributor to unwanted Internet traffic -- 27\% of all malicious connection attempts observed from our distributed darknet can be directly attributed to botnet-related spreading activity. Furthermore, we discovered evidence of botnet infections in 11\% of the 800,000 DNS domains we examined, indicating a high diversity among botnet victims. Taken as a whole, these results not only highlight the prominence of botnets, but also provide deep insights that may facilitate further research to curtail this phenomenon.}
}

@inproceedings{1162669,
author = {Zhichun Li and Yan Chen and Aaron Beach},
title = {Towards scalable and robust distributed intrusion alert fusion with good load balancing},
booktitle = {Proceedings of the SIGCOMM Workshop on Large-Scale Attack Defense (LSAD)},
year = {2006},
pages = {115--122},
doi = {10.1145/1162666.1162669},
abstract = {Traffic anomalies and distributed attacks are commonplace in today's networks. Single point detection is often insufficient to determine the causes, patterns and prevalence of such events. Most existing distributed intrusion detection systems (DIDS) rely on centralized fusion, or distributed fusion with unscalable communication mechanisms. In this paper, we propose to build a DIDS based on the emerging decentralized location and routing infrastructure: \textit{distributed hash table (DHT)}. We embed the intrusion symptoms into the DHT dimensions so that alarms related to the same intrusion (thus with similar symptoms) will be routed to the same sensor fusion center (SFC) while evenly distributing unrelated alarms to different SFCs. This is achieved through careful routing key design based on: 1) analysis of essential characteristics of four common types of intrusions: DoS attacks, port scanning, virus/worm infection and botnets; and 2) distribution and stability analysis of the popular port numbers and those of the popular source IP subnets in scans. We further propose several schemes to distribute the alarms more evenly across the SFCs, and improve the resiliency against the failures or attacks. Evaluation based on one month of DShield firewall logs (600 million scan records) collected from over 2200 worldwide providers show that the resulting system, termed \textit{Cyber Disease DHT (CDDHT)}, can effectively fuse related alarms while distributing unrelated ones evenly among the SFCs. It significantly outperforms the traditional hierarchical approach when facing large amounts of \textit{diverse} intrusion alerts.}
}

@inproceedings{King+05,
  author    = {Samuel T. King and Zhuoqing Morley Mao and Dominic G. Lucchetti and Peter M. Chen},
  title     = {Enriching Intrusion Alerts Through Multi-Host Causality},
  booktitle = {Proceedings of the Network and Distributed System Security Symposium (NDSS)},
  year      = {2005},
  pdf       = {http://www.isoc.org/isoc/conferences/ndss/05/proceedings/papers/camera.pdf}
}

@inproceedings{BarfordYegneswaran,
author = {Paul Barford and Vinod Yegneswaran},
title = {An Inside Look at Botnets},
booktitle = {Special Workshop on Malware Detection, Advances in Information Security},
year = {2006},
pdf = {http://www.cs.wisc.edu/~pb/botnets_final.pdf},
publisher = {Springer Verlag},
abstract = {The continued growth and diversification of the Internet has been accompanied by an increasing prevalence of attacks and intrusions \cite{781045}. It can be argued, however, that a significant change in motivation for malicious activity has taken place over the past several years: from vandalism and recognition in the hacker community, to attacks and intrusions for financial gain. This shift has been marked by a growing sophistication in the tools and methods used to conduct attacks, thereby escalating the network security arms race.\par Our thesis is that the \textit{reactive} methods for network security that are predominant today are ultimately insufficient and that more proactive methods are required. One such approach is to develop a foundational understanding of the mechanisms employed by malicious software (malware) which is often readily available in source form on the Internet. While it is well known that large IT security companies maintain detailed databases of this information, these are not openly available and we are not aware of any such open repository. In this paper we begin the process of codifying the capabilities of malware by dissecting four widely-used Internet Relay Chat (IRC) botnet codebases. Each codebase is classified along seven key dimensions including botnet control mechanisms, host control mechanisms, propagation mechanisms, exploits, delivery mechanisms, obfuscation and deception mechanisms. Our study reveals the complexity of botnet software, and we discuss implications for defense strategies based on our analysis.}
}

@inproceedings{781045,
author = {Vinod Yegneswaran and Paul Barford and Johannes Ullrich},
title = {Internet intrusions: global characteristics and prevalence},
booktitle = {Proceedings of the ACM SIGMETRICS International Conference on Measurement and Modeling of Computer Systems},
year = {2003},
month = jun,
pages = {138--147},
doi = {10.1145/781027.781045},
abstract = {Network intrusions have been a fact of life in the Internet for many years. However, as is the case with many other types of Internet-wide phenomena, gaining insight into the \textit{global} characteristics of intrusions is challenging. In this paper we address this problem by systematically analyzing a set of firewall logs collected over four months from over 1600 different networks world wide. The first part of our study is a general analysis focused on the issues of distribution, categorization and prevalence of intrusions. Our data shows both a large quantity and wide variety of intrusion attempts on a daily basis. We also find that worms like CodeRed, Nimda and SQL Snake persist long after their original release. By projecting intrusion activity as seen in our data sets to the entire Internet we determine that there are typically on the order of 25B intrusion attempts per day and that there is an increasing trend over our measurement period. We further find that sources of intrusions are uniformly spread across the Autonomous System space. However, deeper investigation reveals that a very small collection of sources are responsible for a significant fraction of intrusion attempts in any given month and their on/off patterns exhibit cliques of correlated behavior. We show that the distribution of source IP addresses of the non-worm intrusions as a function of the number of attempts follows Zipf's law. We also find that at daily timescales, intrusion targets often depict significant spatial trends that blur patterns observed from individual ``IP telescopes''; this underscores the necessity for a more global approach to intrusion detection. Finally, we investigate the benefits of shared information, and the potential for using this as a foundation for an automated, global intrusion detection framework that would identify and isolate intrusions with greater precision and robustness than systems with limited perspective.}
}

@techreport{VogtAycock2006,
author = {Ryan Vogt and John Aycock},
title = {Attack of the 50 Foot Botnet},
institution = {Department of Computer Science, University of Calgary},
year = {2006},
number = {2006-840-33},
month = aug,
pdf = {http://pages.cpsc.ucalgary.ca/~aycock/papers/50foot.pdf},
http = {http://pharos.cpsc.ucalgary.ca/Dienst/UI/2.0/Describe/ncstrl.ucalgary_cs/2006-840-33},
abstract = {The trend toward smaller botnets may be \textit{more} dangerous in terms of large-scale attacks like distributed denials of service. We examine the possibility of ``super-botnets,'' networks of independent botnets that can be coordinated for attacks of unprecedented scale. For an adversary, super-botnets would also be extremely versatile and resistant to countermeasures. Our simulation results shed light on the feasibility and structure of super-botnets and some properties of their command-and-control mechanism. Possible defenses against the threat of super-botnets are suggested.}
}

@inproceedings{719482,
author = {Jiahai Yang and Peng Ning and Xiaoyang Sean Wang and Sushil Jajodia},
title = {{CARDS}: A Distributed System for Detecting Coordinated Attacks},
booktitle = {Proceedings of the IFIP TC11 Fifteenth Annual Working Conference on Information Security for Global Information Infrastructures},
year = {2000},
pages = {171--180},
pdf = {http://www.cs.uvm.edu/~xywang/publications/sec2000.pdf},
abstract = {A major research problem in intrusion detection is the efficient detection of coordinated attacks over large networks. Issues to be resolved include determining what data should be collected, which portion of the data should be analyzed, where the analysis of the data should take place, and how to correlate multi-source information. This paper proposes the architecture of a Coordinated Attack Response \& Detection System (CARDS). CARDS uses a signature-based model for resolving these issues. It consists of signature managers, monitors, and directory services. The system collects data in a flexible, distributed manner, and the detection process is decentralized among various monitors and is event-driven. The paper also discusses related implementation issues.}
}

@techreport{Zou2004,
author = {Cliff C. Zou and Don Towsley and Weibo Gong},
title = {A Firewall Network System for Worm Defense in Enterprise Networks},
institution = {University of Massachusetts Amherst, College of Engineering},
year = {2004},
number = {TR-04-CSE-01},
month = feb,
pdf = {http://www.cs.ucf.edu/~czou/research/FirewallNetwork-techreport.pdf}
}

@article{Goebel+2006,
author = {Jan G{\"o}bel and Jens Hektor and Thorsten Holz},
title = {Advanced Honeypot-Based Intrusion Detection},
journal = {USENIX ;login: magazine},
year = {2006},
volume = {31},
number = {6},
pages = {17--25},
month = dec
}

@unpublished{Gu+2007_insubmission,
author = {Guofei Gu and Phillip Porras and Vinod Yegneswaran and Martin Fong and Wenke Lee},
title = {{BotHunter}: Detecting Malware Infection Through {IDS}-Driven Dialog Correlation},
note = {In submission},
year = {2007}
}

@unpublished{StinsonMitchell2007_insubmission,
author = {Elizabeth Stinson and John C. Mitchell},
title = {Characterizing the Remote Control Behavior of Bots},
note = {In submission},
year = {2007}
}

@misc{bothunter-site,
  key   = {BotHunter},
  title = {{BotHunter} Distribution Page},
  year  = {2007},
  url   = {http://www.cyber-ta.org/releases/botHunter/},
  note  = {\url{http://www.cyber-ta.org/releases/botHunter/}}
}

@misc{snort-site,
  key   = {snort},
  title = {{snort} web site},
  year  = {2007},
  url   = {http://snort.org/},
  note  = {\url{http://snort.org/}}
}


This file was generated by bibtex2html 1.96.